diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/events/TabularFldChangeEvent.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/events/TabularFldChangeEvent.java new file mode 100644 index 0000000..a50654c --- /dev/null +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/events/TabularFldChangeEvent.java @@ -0,0 +1,66 @@ +package org.gcube.portlets.user.dataminermanager.client.events; + +import org.gcube.portlets.user.dataminermanager.shared.data.TableItemSimple; + +import com.google.gwt.event.shared.EventHandler; +import com.google.gwt.event.shared.GwtEvent; +import com.google.gwt.event.shared.HandlerRegistration; +import com.google.gwt.event.shared.HasHandlers; + +/** + * Tabular Field Change Event + * + * + * @author "Giancarlo Panichi" g.panichi@isti.cnr.it + * + */ +public class TabularFldChangeEvent extends + GwtEvent<TabularFldChangeEvent.TabularFldChangeEventHandler> { + + public static Type<TabularFldChangeEventHandler> TYPE = new Type<TabularFldChangeEventHandler>(); + private TableItemSimple tableItemSimple; + + public interface TabularFldChangeEventHandler extends EventHandler { + void onChange(TabularFldChangeEvent event); + } + + public interface HasTabularFldChangeEventHandler extends HasHandlers { + public HandlerRegistration addTabularFldChangeEventHandler( + TabularFldChangeEventHandler handler); + } + + public TabularFldChangeEvent(TableItemSimple tableItemSimple) { + this.tableItemSimple = tableItemSimple; + + } + + @Override + protected void dispatch(TabularFldChangeEventHandler handler) { + handler.onChange(this); + } + + @Override + public Type<TabularFldChangeEventHandler> getAssociatedType() { + return TYPE; + } + + public static Type<TabularFldChangeEventHandler> getType() { + return TYPE; + } + + public static void fire(HasHandlers source, TabularFldChangeEvent event) { + source.fireEvent(event); + } + + public TableItemSimple getTableItemSimple() { + return tableItemSimple; + } + + @Override + public String toString() { + return "TabularFldChangeEvent [tableItemSimple=" + tableItemSimple + + "]"; + } + +} diff --git 
a/src/main/java/org/gcube/portlets/user/dataminermanager/client/experimentArea/ComputationPanel.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/experimentArea/ComputationPanel.java index 2d3e8de..87a5765 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/experimentArea/ComputationPanel.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/experimentArea/ComputationPanel.java @@ -2,11 +2,9 @@ package org.gcube.portlets.user.dataminermanager.client.experimentArea; import org.gcube.portlets.user.dataminermanager.client.DataMinerManager; import org.gcube.portlets.user.dataminermanager.client.bean.Operator; -import org.gcube.portlets.user.dataminermanager.client.events.ComponentRefreshLayoutEvent; import org.gcube.portlets.user.dataminermanager.client.events.StartComputationEvent; -import org.gcube.portlets.user.dataminermanager.client.events.ComponentRefreshLayoutEvent.ComponentRefreshLayoutEventHandler; -import org.gcube.portlets.user.dataminermanager.client.events.StartComputationEvent.StartComputationEventHandler; import org.gcube.portlets.user.dataminermanager.client.events.StartComputationEvent.HasStartComputationEventHandler; +import org.gcube.portlets.user.dataminermanager.client.events.StartComputationEvent.StartComputationEventHandler; import org.gcube.portlets.user.dataminermanager.client.experimentArea.ComputationParametersPanel.ComputationParametersPanelHandler; import com.google.gwt.event.shared.HandlerRegistration; @@ -93,7 +91,7 @@ public class ComputationPanel extends FramedPanel implements v.clear(); CenterLayoutContainer centerContainer = new CenterLayoutContainer(); centerContainer.add(new HTML( - "

Select an operator.

")); + "Select an operator.")); v.add(centerContainer, new VerticalLayoutData(1, 1, new Margins(0))); forceLayout(); @@ -111,6 +109,7 @@ public class ComputationPanel extends FramedPanel implements v.add(computationParametersPanel, new VerticalLayoutData(1, -1, new Margins(0,5,5,5))); removeAllButton.setEnabled(true); + forceLayout(); } diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/experimentArea/ComputationParametersPanel.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/experimentArea/ComputationParametersPanel.java index 0a94ec9..a8f4309 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/experimentArea/ComputationParametersPanel.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/experimentArea/ComputationParametersPanel.java @@ -13,10 +13,9 @@ import org.gcube.portlets.user.dataminermanager.client.bean.Operator; import org.gcube.portlets.user.dataminermanager.client.bean.parameters.ColumnListParameter; import org.gcube.portlets.user.dataminermanager.client.bean.parameters.ColumnParameter; import org.gcube.portlets.user.dataminermanager.client.bean.parameters.Parameter; -import org.gcube.portlets.user.dataminermanager.client.events.ComponentRefreshLayoutEvent; -import org.gcube.portlets.user.dataminermanager.client.events.ComponentRefreshLayoutEvent.ComponentRefreshLayoutEventHandler; -import org.gcube.portlets.user.dataminermanager.client.events.ComponentRefreshLayoutEvent.HasComponentRefreshLayoutEventHandler; import org.gcube.portlets.user.dataminermanager.client.parametersfield.AbstractFld; +import org.gcube.portlets.user.dataminermanager.client.parametersfield.ColumnFld; +import org.gcube.portlets.user.dataminermanager.client.parametersfield.ColumnListFld; import org.gcube.portlets.user.dataminermanager.client.parametersfield.OperatorFieldWidget; import org.gcube.portlets.user.dataminermanager.client.parametersfield.TabularFld; import 
org.gcube.portlets.user.dataminermanager.client.util.UtilsGXT3; @@ -24,12 +23,10 @@ import org.gcube.portlets.user.dataminermanager.client.util.UtilsGXT3; import com.allen_sauer.gwt.log.client.Log; import com.google.gwt.core.client.GWT; import com.google.gwt.dom.client.Style.Unit; -import com.google.gwt.event.shared.HandlerRegistration; import com.google.gwt.i18n.client.DateTimeFormat; import com.google.gwt.i18n.client.DateTimeFormat.PredefinedFormat; import com.google.gwt.resources.client.ImageResource; import com.google.gwt.user.client.rpc.AsyncCallback; -import com.google.gwt.user.client.ui.HTML; import com.google.gwt.user.client.ui.Image; import com.sencha.gxt.core.client.util.Margins; import com.sencha.gxt.widget.core.client.button.TextButton; @@ -73,11 +70,7 @@ public class ComputationParametersPanel extends SimpleContainer { private TextField titleField; private String defaultComputationTitle; - /* - * public ComputationParametersPanel(DataMinerPortletServiceAsync service, - * Operator operator, ComputationParametersPanelHandler handler) { - * this(operator); this.setHandler(handler); } - */ + private TextButton submit; public ComputationParametersPanel(Operator operator) { super(); @@ -92,7 +85,6 @@ public class ComputationParametersPanel extends SimpleContainer { } } - private void init() { addStyleName("workflow"); } @@ -105,21 +97,25 @@ public class ComputationParametersPanel extends SimpleContainer { + (operator.hasImage() ? 
operator.getId() : "DEFAULT_IMAGE") + ".png"); img.setStylePrimaryName("workflow-icon"); - v.add(img, new VerticalLayoutData(-1, -1, new Margins(-10,0,0,5))); - - HtmlLayoutContainer title = new HtmlLayoutContainer(operator.getName()); + v.add(img, new VerticalLayoutData(-1, -1, new Margins(-10, 0, 0, 5))); + + HtmlLayoutContainer title = new HtmlLayoutContainer("" + + operator.getName() + ""); title.addStyleName("workflow-title"); - v.add(title, new VerticalLayoutData(-1, -1, new Margins(15,0,0,-25))); + v.add(title, new VerticalLayoutData(-1, -1, new Margins(15, 0, 0, -25))); String descr = operator.getDescription(); descr = (descr == null || descr.contentEquals("")) ? "no-description" : operator.getDescription(); - HtmlLayoutContainer description = new HtmlLayoutContainer(descr); + HtmlLayoutContainer description = new HtmlLayoutContainer("" + + descr + ""); description.addStyleName("workflow-description"); - v.add(description, new VerticalLayoutData(-1, -1, new Margins(0,0,0,5))); + v.add(description, new VerticalLayoutData(-1, -1, new Margins(0, 5, 0, + 10))); addTitleField(); - + + parametersPanel = new FormPanel() { @Override public boolean isValid(boolean preventMark) { @@ -136,21 +132,55 @@ public class ComputationParametersPanel extends SimpleContainer { } }; + //parametersPanel = new FramedPanel(); + parametersPanel.setDeferHeight(false); parametersPanel.setBorders(false); // parametersPanel.getElement().getStyle().setMargin(20, px); parametersFieldSet = new FieldSet(); parametersFieldSet.setHeadingText("Parameters"); + vParameters = new VerticalLayoutContainer(); + + Image imgLoading = new Image(PRELOAD_IMAGE); + vParameters.add(imgLoading, new VerticalLayoutData(1, -1, new Margins(5))); + parametersFieldSet.add(vParameters, new MarginData(0)); parametersFieldSet.setCollapsible(false); - parametersPanel.add(parametersFieldSet, new MarginData(new Margins(5,5, 5, 5))); - + parametersPanel.add(parametersFieldSet, new MarginData(new Margins(5))); 
v.add(parametersPanel, new VerticalLayoutData(1, -1, new Margins())); + + submit = new TextButton("Start Computation"); + submit.setToolTip(START_BUTTON_TOOLTIP); + submit.setIcon(DataMinerManager.resources.startComputation()); + + + submit.getElement().getStyle().setMarginLeft(20, Unit.PX); + submit.getElement().getStyle().setMarginBottom(20, Unit.PX); + + submit.addSelectHandler(new SelectHandler() { + + @Override + public void onSelect(SelectEvent event) { + if (handler != null && parametersPanel.isValid()) { + String value = titleField.getValue(); + String title = (value == null || value.contentEquals("")) ? defaultComputationTitle + : value; + handler.startComputation(title, title); // TODO insert + // description + } + + } + }); + + v.add(submit); + //, new VerticalLayoutData(-1, -1, new Margins(0, 0, 20, 20))); + submit.setVisible(false); + + forceLayout(); loadOperatorParameters(); - - fireEvent(new ComponentRefreshLayoutEvent()); + } /** @@ -162,7 +192,7 @@ public class ComputationParametersPanel extends SimpleContainer { titleField.setValue(defaultComputationTitle); FieldLabel titleLabel = new FieldLabel(titleField, "Computation Title"); v.add(titleLabel, new VerticalLayoutData(-1, -1, - new Margins(0,0,0,5))); + new Margins(5, 0, 0, 5))); } /** @@ -185,10 +215,8 @@ public class ComputationParametersPanel extends SimpleContainer { "Impossible to retrieve parameters."); } }); - Image img = new Image(PRELOAD_IMAGE); - img.setStyleName("workflow-parameters-preload"); - vParameters.add(img, new VerticalLayoutData(-1, -1, new Margins())); - forceLayout(); + + } /** @@ -220,7 +248,20 @@ public class ComputationParametersPanel extends SimpleContainer { if (tabularField != null) { AbstractFld field = fieldWidgetsMap .get(p.getName()).getField(); - tabularField.addChangeListener(field); + if (field instanceof ColumnFld) { + ColumnFld columnField = (ColumnFld) field; + tabularField + .addTabularFldChangeEventHandler(columnField); + } else { + if (field instanceof 
ColumnListFld) { + ColumnListFld columnListField = (ColumnListFld) field; + tabularField + .addTabularFldChangeEventHandler(columnListField); + } else { + + } + } + } } catch (Exception e) { e.printStackTrace(); @@ -234,31 +275,7 @@ public class ComputationParametersPanel extends SimpleContainer { } } - final TextButton submit = new TextButton("Start Computation"); - submit.setToolTip(START_BUTTON_TOOLTIP); - submit.setIcon(DataMinerManager.resources.startComputation()); - submit.getElement().getStyle().setMarginLeft(20, Unit.PX); - submit.getElement().getStyle().setMarginBottom(20, Unit.PX); - - submit.addSelectHandler(new SelectHandler() { - - @Override - public void onSelect(SelectEvent event) { - if (handler != null && parametersPanel.isValid()) { - String value = titleField.getValue(); - String title = (value == null || value - .contentEquals("")) ? defaultComputationTitle - : value; - handler.startComputation(title, title); // TODO insert - // description - } - - } - }); - - //new VerticalLayoutData(-1, -1, new Margins(0, 0, 20,20)) - v.add(submit); - + submit.setVisible(true); forceLayout(); } catch (Throwable e) { Log.error("Error in show form:" + e.getLocalizedMessage()); diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/AbstractFld.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/AbstractFld.java index 29c7de5..f3b6395 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/AbstractFld.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/AbstractFld.java @@ -13,12 +13,13 @@ import com.google.gwt.user.client.ui.Widget; * email: g.panichi@isti.cnr.it * */ -public abstract class AbstractFld { +public abstract class AbstractFld { protected Parameter parameter; public abstract String getValue(); public abstract Widget getWidget(); + /** * */ @@ -48,9 +49,6 @@ public abstract class AbstractFld { // (for string, int, 
float, double and boolean) the built-in validation is enough return true; } - /** - * @param tableItem - */ - public void fireEvent(Object message) { - } + + } diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/BooleanFld.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/BooleanFld.java index d8f9b6d..0f34a5f 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/BooleanFld.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/BooleanFld.java @@ -7,14 +7,23 @@ import org.gcube.portlets.user.dataminermanager.client.bean.parameters.ObjectPar import org.gcube.portlets.user.dataminermanager.client.bean.parameters.Parameter; import com.google.gwt.user.client.ui.Widget; +import com.sencha.gxt.core.client.util.Margins; +import com.sencha.gxt.widget.core.client.container.BoxLayoutContainer.BoxLayoutData; +import com.sencha.gxt.widget.core.client.container.BoxLayoutContainer.BoxLayoutPack; +import com.sencha.gxt.widget.core.client.container.HBoxLayoutContainer; +import com.sencha.gxt.widget.core.client.container.HtmlLayoutContainer; +import com.sencha.gxt.widget.core.client.container.SimpleContainer; import com.sencha.gxt.widget.core.client.form.CheckBox; /** - * @author ceras + * + * @author Giancarlo Panichi email: g.panichi@isti.cnr.it * */ public class BooleanFld extends AbstractFld { + private SimpleContainer fieldContainer; private CheckBox checkBox = new CheckBox(); /** @@ -22,15 +31,39 @@ public class BooleanFld extends AbstractFld { */ public BooleanFld(Parameter parameter) { super(parameter); + fieldContainer = new SimpleContainer(); + HBoxLayoutContainer horiz = new HBoxLayoutContainer(); + horiz.setPack(BoxLayoutPack.START); + horiz.setEnableOverflow(false); - ObjectParameter p = (ObjectParameter)parameter; - checkBox.setValue(!p.getDefaultValue().toUpperCase().equals("FALSE")); - if (p.getDescription()!=null) - 
checkBox.setTitle(p.getDescription()); + ObjectParameter p = (ObjectParameter) parameter; + + if (p.getDefaultValue() != null) + checkBox.setValue(!p.getDefaultValue().toUpperCase() + .equals("FALSE")); + else checkBox.setValue(false); checkBox.setBoxLabel(p.getName()); + + HtmlLayoutContainer descr; + + if (p.getDescription() == null) { + descr = new HtmlLayoutContainer("

"); + descr.addStyleName("workflow-fieldDescription"); + + } else { + checkBox.setToolTip(p.getDescription()); + descr = new HtmlLayoutContainer("

" + + p.getDescription() + "

"); + descr.addStyleName("workflow-fieldDescription"); + } + + horiz.add(checkBox, new BoxLayoutData(new Margins())); + horiz.add(descr, new BoxLayoutData(new Margins())); + + fieldContainer.add(horiz); + } - @Override public String getValue() { return (checkBox.getValue() ? "true" : "false"); @@ -38,7 +71,7 @@ public class BooleanFld extends AbstractFld { @Override public Widget getWidget() { - return checkBox; + return fieldContainer; } } diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ColumnFld.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ColumnFld.java index 8e73055..a8521d4 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ColumnFld.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ColumnFld.java @@ -7,16 +7,25 @@ import java.util.ArrayList; import org.gcube.portlets.user.dataminermanager.client.bean.parameters.ColumnParameter; import org.gcube.portlets.user.dataminermanager.client.bean.parameters.Parameter; +import org.gcube.portlets.user.dataminermanager.client.events.TabularFldChangeEvent; +import org.gcube.portlets.user.dataminermanager.client.events.TabularFldChangeEvent.TabularFldChangeEventHandler; import org.gcube.portlets.user.dataminermanager.client.properties.ColumnItemPropertiesCombo; import org.gcube.portlets.user.dataminermanager.shared.data.ColumnItem; import org.gcube.portlets.user.dataminermanager.shared.data.TableItemSimple; import com.google.gwt.core.client.GWT; -import com.google.gwt.user.client.ui.HTML; import com.google.gwt.user.client.ui.Widget; import com.sencha.gxt.cell.core.client.form.ComboBoxCell.TriggerAction; +import com.sencha.gxt.core.client.util.Format; +import com.sencha.gxt.core.client.util.Margins; import com.sencha.gxt.data.shared.ListStore; +import com.sencha.gxt.widget.core.client.container.BoxLayoutContainer.BoxLayoutData; +import 
com.sencha.gxt.widget.core.client.container.BoxLayoutContainer.BoxLayoutPack; +import com.sencha.gxt.widget.core.client.container.HBoxLayoutContainer; +import com.sencha.gxt.widget.core.client.container.HtmlLayoutContainer; +import com.sencha.gxt.widget.core.client.container.SimpleContainer; import com.sencha.gxt.widget.core.client.container.VerticalLayoutContainer; +import com.sencha.gxt.widget.core.client.container.VerticalLayoutContainer.VerticalLayoutData; import com.sencha.gxt.widget.core.client.form.ComboBox; /** @@ -25,9 +34,10 @@ import com.sencha.gxt.widget.core.client.form.ComboBox; * href="mailto:g.panichi@isti.cnr.it">g.panichi@isti.cnr.it * */ -public class ColumnFld extends AbstractFld { +public class ColumnFld extends AbstractFld implements + TabularFldChangeEventHandler { - private VerticalLayoutContainer vp; + //private VerticalLayoutContainer vp; private String defaultColumn; @@ -35,17 +45,28 @@ public class ColumnFld extends AbstractFld { private ListStore store; private String referredTabularParameterName; + private SimpleContainer fieldContainer; + + private SimpleContainer vContainer; + /** * @param parameter */ public ColumnFld(Parameter parameter) { super(parameter); - vp = new VerticalLayoutContainer(); + ColumnParameter p = (ColumnParameter) parameter; - this.referredTabularParameterName = p.getReferredTabularParameterName(); - this.defaultColumn = p.getDefaultColumn(); + + referredTabularParameterName = p.getReferredTabularParameterName(); + defaultColumn = p.getDefaultColumn(); + fieldContainer = new SimpleContainer(); + HBoxLayoutContainer horiz = new HBoxLayoutContainer(); + horiz.setPack(BoxLayoutPack.START); + horiz.setEnableOverflow(false); + + ColumnItemPropertiesCombo props = GWT .create(ColumnItemPropertiesCombo.class); @@ -56,18 +77,62 @@ public class ColumnFld extends AbstractFld { comboBox.setForceSelection(true); comboBox.setEditable(false); comboBox.setTriggerAction(TriggerAction.ALL); - if (p.getDescription() != null) - 
comboBox.setTitle(p.getDescription()); comboBox.setEnabled(false); + + HtmlLayoutContainer descr; + + if (p.getDescription() == null) { + descr = new HtmlLayoutContainer("

"); + descr.addStyleName("workflow-fieldDescription"); + + } else { + comboBox.setToolTip(p.getDescription()); + descr = new HtmlLayoutContainer("

" + + p.getDescription() + "

"); + descr.addStyleName("workflow-fieldDescription"); + } + + vContainer=new SimpleContainer(); showNoSelectionField(); + horiz.add(vContainer, new BoxLayoutData(new Margins())); + horiz.add(descr, new BoxLayoutData(new Margins())); + + fieldContainer.add(horiz); + fieldContainer.forceLayout(); + + } + private void showNoSelectionField() { + vContainer.clear(); + VerticalLayoutContainer vField = new VerticalLayoutContainer(); + HtmlLayoutContainer typeDescription= new HtmlLayoutContainer( + "

Select table from parameter " + + Format.ellipse(referredTabularParameterName,30) + "

"); + typeDescription.setStylePrimaryName("workflow-parameters-description"); + vField.add(comboBox, new VerticalLayoutData(1,-1,new Margins(0))); + vField.add(typeDescription, new VerticalLayoutData(-1,-1,new Margins(0))); + vContainer.add(vField); + } + + private void showFieldWithSelection(TableItemSimple tableItem) { + vContainer.clear(); + VerticalLayoutContainer vField = new VerticalLayoutContainer(); + HtmlLayoutContainer typeDescription= new HtmlLayoutContainer( + "

Columns of Table " + + Format.ellipse(tableItem.getName(),30) + "

"); + typeDescription.setStylePrimaryName("workflow-parameters-description"); + vField.add(comboBox, new VerticalLayoutData(1,-1,new Margins(0))); + vField.add(typeDescription, new VerticalLayoutData(-1,-1,new Margins(0))); + vContainer.add(vField); + } + + /** * - */ + private void showNoSelectionField() { vp.clear(); - ; vp.add(comboBox); vp.add(new HTML( "
Select table from parameter " @@ -75,9 +140,6 @@ public class ColumnFld extends AbstractFld { vp.forceLayout(); } - /** - * - */ private void showFieldWithSelection(TableItemSimple tableItem) { vp.clear(); vp.add(comboBox); @@ -85,7 +147,7 @@ public class ColumnFld extends AbstractFld { "
Columns of Table " + tableItem.getName() + "
")); vp.forceLayout(); - } + } */ /** * @@ -101,23 +163,22 @@ public class ColumnFld extends AbstractFld { */ @Override public Widget getWidget() { - return vp; + return fieldContainer; } - @Override - public void fireEvent(Object message) { - if (message == null) { + public void onChange(TabularFldChangeEvent event) { + TableItemSimple tableItemSimple = event.getTableItemSimple(); + if (tableItemSimple == null) { store.clear(); store.commitChanges(); comboBox.clear(); comboBox.setEnabled(false); showNoSelectionField(); } else { - TableItemSimple tableItem = (TableItemSimple) message; store.clear(); store.commitChanges(); - ArrayList columns = tableItem.getColumns(); + ArrayList columns = tableItemSimple.getColumns(); if (columns != null) { store.addAll(columns); for (ColumnItem columnItem : columns) { @@ -130,8 +191,9 @@ public class ColumnFld extends AbstractFld { } comboBox.clear(); comboBox.setEnabled(true); - showFieldWithSelection(tableItem); + showFieldWithSelection(tableItemSimple); } + fieldContainer.forceLayout(); } } diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ColumnListFld.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ColumnListFld.java index 9d6a200..de95303 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ColumnListFld.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ColumnListFld.java @@ -8,6 +8,8 @@ import java.util.List; import org.gcube.portlets.user.dataminermanager.client.bean.parameters.ColumnListParameter; import org.gcube.portlets.user.dataminermanager.client.bean.parameters.Parameter; +import org.gcube.portlets.user.dataminermanager.client.events.TabularFldChangeEvent; +import org.gcube.portlets.user.dataminermanager.client.events.TabularFldChangeEvent.TabularFldChangeEventHandler; import org.gcube.portlets.user.dataminermanager.client.properties.ColumnItemProperties; import 
org.gcube.portlets.user.dataminermanager.shared.data.ColumnItem; import org.gcube.portlets.user.dataminermanager.shared.data.TableItemSimple; @@ -30,9 +32,9 @@ import com.sencha.gxt.widget.core.client.grid.Grid; * href="mailto:g.panichi@isti.cnr.it">g.panichi@isti.cnr.it * */ -public class ColumnListFld extends AbstractFld { +public class ColumnListFld extends AbstractFld implements + TabularFldChangeEventHandler { - private VerticalLayoutContainer vp; private ColumnListParameter columnListParameter; private ListStore store; @@ -67,9 +69,9 @@ public class ColumnListFld extends AbstractFld { ColumnModel cm = new ColumnModel(l); store = new ListStore(props.id()); - + grid = new Grid(store, cm); - + sm.setSelectionMode(SelectionMode.MULTI); grid.setSelectionModel(sm); // grid.getView().setAutoExpandColumn(labelCol); @@ -82,7 +84,7 @@ public class ColumnListFld extends AbstractFld { grid.setColumnReordering(true); grid.setColumnResize(false); grid.disable(); - + } /** @@ -110,7 +112,6 @@ public class ColumnListFld extends AbstractFld { vp.forceLayout(); } - /** * */ @@ -119,9 +120,9 @@ public class ColumnListFld extends AbstractFld { String separator = columnListParameter.getSeparator(); String value = ""; boolean first = true; - + for (ColumnItem columnItem : sm.getSelection()) { - String columnName = columnItem.getName(); + String columnName = columnItem.getName(); value += (first ? 
"" : separator) + columnName; first = false; } @@ -136,26 +137,7 @@ public class ColumnListFld extends AbstractFld { return vp; } - /** - * - */ - @Override - public void fireEvent(Object message) { - if (message == null) { - store.clear(); - store.commitChanges(); - grid.disable(); - showNoSelectionField(); - } else { - TableItemSimple tableItem = (TableItemSimple) message; - store.clear(); - store.commitChanges(); - store.addAll(tableItem.getColumns()); - store.commitChanges(); - grid.enable(); - showFieldWithSelection(tableItem); - } - } + /** * @@ -165,4 +147,23 @@ public class ColumnListFld extends AbstractFld { return store.size() > 0; } + @Override + public void onChange(TabularFldChangeEvent event) { + TableItemSimple tableItemSimple = event.getTableItemSimple(); + if (tableItemSimple == null) { + store.clear(); + store.commitChanges(); + grid.disable(); + showNoSelectionField(); + } else { + store.clear(); + store.commitChanges(); + store.addAll(tableItemSimple.getColumns()); + store.commitChanges(); + grid.enable(); + showFieldWithSelection(tableItemSimple); + } + + } + } diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/DoubleFld.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/DoubleFld.java index 5c9eb5e..6b8efcc 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/DoubleFld.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/DoubleFld.java @@ -4,6 +4,14 @@ import org.gcube.portlets.user.dataminermanager.client.bean.parameters.ObjectPar import org.gcube.portlets.user.dataminermanager.client.bean.parameters.Parameter; import com.google.gwt.user.client.ui.Widget; +import com.sencha.gxt.core.client.util.Margins; +import com.sencha.gxt.widget.core.client.container.BoxLayoutContainer.BoxLayoutData; +import com.sencha.gxt.widget.core.client.container.BoxLayoutContainer.BoxLayoutPack; +import 
com.sencha.gxt.widget.core.client.container.HBoxLayoutContainer; +import com.sencha.gxt.widget.core.client.container.HtmlLayoutContainer; +import com.sencha.gxt.widget.core.client.container.SimpleContainer; +import com.sencha.gxt.widget.core.client.container.VerticalLayoutContainer; +import com.sencha.gxt.widget.core.client.container.VerticalLayoutContainer.VerticalLayoutData; import com.sencha.gxt.widget.core.client.form.DoubleField; /** @@ -13,7 +21,8 @@ import com.sencha.gxt.widget.core.client.form.DoubleField; * */ public class DoubleFld extends AbstractFld { - + + private SimpleContainer fieldContainer; private DoubleField numberField; /** @@ -21,6 +30,10 @@ public class DoubleFld extends AbstractFld { */ public DoubleFld(Parameter parameter) { super(parameter); + fieldContainer=new SimpleContainer(); + HBoxLayoutContainer horiz = new HBoxLayoutContainer(); + horiz.setPack(BoxLayoutPack.START); + horiz.setEnableOverflow(false); ObjectParameter p = (ObjectParameter) parameter; @@ -28,10 +41,37 @@ public class DoubleFld extends AbstractFld { if (p.getDefaultValue() != null) numberField.setValue(Double.parseDouble(p.getDefaultValue())); + else + numberField.setAllowBlank(false); + + HtmlLayoutContainer descr; - if (p.getDescription() != null) - numberField.setTitle(p.getDescription()); + if (p.getDescription() == null) { + descr = new HtmlLayoutContainer("

"); + descr.addStyleName("workflow-fieldDescription"); + } else { + numberField.setToolTip(p.getDescription()); + descr = new HtmlLayoutContainer("

" + + p.getDescription() + "

"); + descr.addStyleName("workflow-fieldDescription"); + } + + SimpleContainer vContainer=new SimpleContainer(); + VerticalLayoutContainer vField = new VerticalLayoutContainer(); + HtmlLayoutContainer typeDescription = new HtmlLayoutContainer( + "Double Value"); + typeDescription.setStylePrimaryName("workflow-parameters-description"); + vField.add(numberField, new VerticalLayoutData(1,-1,new Margins(0))); + vField.add(typeDescription, new VerticalLayoutData(-1,-1,new Margins(0))); + vContainer.add(vField); + + horiz.add(vContainer, new BoxLayoutData(new Margins())); + horiz.add(descr, new BoxLayoutData(new Margins())); + + fieldContainer.add(horiz); + + fieldContainer.forceLayout(); } /** @@ -47,7 +87,7 @@ public class DoubleFld extends AbstractFld { */ @Override public Widget getWidget() { - return numberField; + return fieldContainer; } } diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/EnumFld.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/EnumFld.java index b3155a8..3cad9e2 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/EnumFld.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/EnumFld.java @@ -8,17 +8,24 @@ import org.gcube.portlets.user.dataminermanager.client.bean.parameters.Parameter import com.google.gwt.user.client.ui.Widget; import com.sencha.gxt.cell.core.client.form.ComboBoxCell.TriggerAction; +import com.sencha.gxt.core.client.util.Margins; import com.sencha.gxt.data.shared.StringLabelProvider; +import com.sencha.gxt.widget.core.client.container.HBoxLayoutContainer; +import com.sencha.gxt.widget.core.client.container.HtmlLayoutContainer; +import com.sencha.gxt.widget.core.client.container.SimpleContainer; +import com.sencha.gxt.widget.core.client.container.BoxLayoutContainer.BoxLayoutData; +import com.sencha.gxt.widget.core.client.container.BoxLayoutContainer.BoxLayoutPack; import 
com.sencha.gxt.widget.core.client.form.SimpleComboBox; /** * - * @author Giancarlo Panichi - * email: g.panichi@isti.cnr.it + * @author Giancarlo Panichi email: g.panichi@isti.cnr.it * */ public class EnumFld extends AbstractFld { + private SimpleContainer fieldContainer; private SimpleComboBox listBox; /** @@ -26,6 +33,10 @@ public class EnumFld extends AbstractFld { */ public EnumFld(Parameter parameter) { super(parameter); + fieldContainer = new SimpleContainer(); + HBoxLayoutContainer horiz = new HBoxLayoutContainer(); + horiz.setPack(BoxLayoutPack.START); + horiz.setEnableOverflow(false); EnumParameter p = (EnumParameter) parameter; @@ -35,11 +46,27 @@ public class EnumFld extends AbstractFld { listBox.setForceSelection(true); listBox.setEditable(false); listBox.setTriggerAction(TriggerAction.ALL); - if (p.getDescription() != null) - listBox.setTitle(p.getDescription()); + if (p.getDefaultValue() != null) listBox.setValue(p.getDefaultValue()); - + + HtmlLayoutContainer descr; + + if (p.getDescription() == null) { + descr = new HtmlLayoutContainer("

"); + descr.addStyleName("workflow-fieldDescription"); + + } else { + listBox.setToolTip(p.getDescription()); + descr = new HtmlLayoutContainer("

" + + p.getDescription() + "

"); + descr.addStyleName("workflow-fieldDescription"); + } + horiz.add(listBox, new BoxLayoutData(new Margins())); + horiz.add(descr, new BoxLayoutData(new Margins())); + + fieldContainer.add(horiz); + } /** diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/FloatFld.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/FloatFld.java index c379c66..360008e 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/FloatFld.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/FloatFld.java @@ -4,6 +4,14 @@ import org.gcube.portlets.user.dataminermanager.client.bean.parameters.ObjectPar import org.gcube.portlets.user.dataminermanager.client.bean.parameters.Parameter; import com.google.gwt.user.client.ui.Widget; +import com.sencha.gxt.core.client.util.Margins; +import com.sencha.gxt.widget.core.client.container.BoxLayoutContainer.BoxLayoutData; +import com.sencha.gxt.widget.core.client.container.BoxLayoutContainer.BoxLayoutPack; +import com.sencha.gxt.widget.core.client.container.HBoxLayoutContainer; +import com.sencha.gxt.widget.core.client.container.HtmlLayoutContainer; +import com.sencha.gxt.widget.core.client.container.SimpleContainer; +import com.sencha.gxt.widget.core.client.container.VerticalLayoutContainer; +import com.sencha.gxt.widget.core.client.container.VerticalLayoutContainer.VerticalLayoutData; import com.sencha.gxt.widget.core.client.form.FloatField; /** @@ -13,7 +21,8 @@ import com.sencha.gxt.widget.core.client.form.FloatField; * */ public class FloatFld extends AbstractFld { - + + private SimpleContainer fieldContainer; private FloatField numberField; /** @@ -21,17 +30,50 @@ public class FloatFld extends AbstractFld { */ public FloatFld(Parameter parameter) { super(parameter); + fieldContainer=new SimpleContainer(); + HBoxLayoutContainer horiz = new HBoxLayoutContainer(); + horiz.setPack(BoxLayoutPack.START); + 
horiz.setEnableOverflow(false); ObjectParameter p = (ObjectParameter) parameter; - + + numberField = new FloatField(); if (p.getDefaultValue() != null) numberField.setValue(Float.parseFloat(p.getDefaultValue())); + else + numberField.setAllowBlank(false); + - if (p.getDescription() != null) - numberField.setTitle(p.getDescription()); + HtmlLayoutContainer descr; + if (p.getDescription() == null) { + descr = new HtmlLayoutContainer("

"); + descr.addStyleName("workflow-fieldDescription"); + + } else { + numberField.setToolTip(p.getDescription()); + descr = new HtmlLayoutContainer("

" + + p.getDescription() + "

"); + descr.addStyleName("workflow-fieldDescription"); + } + + SimpleContainer vContainer=new SimpleContainer(); + VerticalLayoutContainer vField = new VerticalLayoutContainer(); + HtmlLayoutContainer typeDescription = new HtmlLayoutContainer( + "Float Value"); + typeDescription.setStylePrimaryName("workflow-parameters-description"); + vField.add(numberField, new VerticalLayoutData(1,-1,new Margins(0))); + vField.add(typeDescription, new VerticalLayoutData(-1,-1,new Margins(0))); + vContainer.add(vField); + + horiz.add(vContainer, new BoxLayoutData(new Margins())); + horiz.add(descr, new BoxLayoutData(new Margins())); + + fieldContainer.add(horiz); + fieldContainer.forceLayout(); + } /** @@ -48,6 +90,6 @@ public class FloatFld extends AbstractFld { */ @Override public Widget getWidget() { - return numberField; + return fieldContainer; } } diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/IntFld.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/IntFld.java index dad45de..1cdcb89 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/IntFld.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/IntFld.java @@ -1,42 +1,77 @@ - package org.gcube.portlets.user.dataminermanager.client.parametersfield; import org.gcube.portlets.user.dataminermanager.client.bean.parameters.ObjectParameter; import org.gcube.portlets.user.dataminermanager.client.bean.parameters.Parameter; import com.google.gwt.user.client.ui.Widget; +import com.sencha.gxt.core.client.util.Margins; +import com.sencha.gxt.widget.core.client.container.BoxLayoutContainer.BoxLayoutData; +import com.sencha.gxt.widget.core.client.container.BoxLayoutContainer.BoxLayoutPack; +import com.sencha.gxt.widget.core.client.container.HBoxLayoutContainer; +import com.sencha.gxt.widget.core.client.container.HtmlLayoutContainer; +import 
com.sencha.gxt.widget.core.client.container.SimpleContainer; +import com.sencha.gxt.widget.core.client.container.VerticalLayoutContainer; +import com.sencha.gxt.widget.core.client.container.VerticalLayoutContainer.VerticalLayoutData; import com.sencha.gxt.widget.core.client.form.IntegerField; - /** * - * @author Giancarlo Panichi - * email: g.panichi@isti.cnr.it + * @author Giancarlo Panichi email: g.panichi@isti.cnr.it * */ public class IntFld extends AbstractFld { - + private SimpleContainer fieldContainer; private IntegerField numberField; - + /** * @param operator */ public IntFld(Parameter parameter) { super(parameter); + fieldContainer = new SimpleContainer(); + HBoxLayoutContainer horiz = new HBoxLayoutContainer(); + horiz.setPack(BoxLayoutPack.START); + horiz.setEnableOverflow(false); ObjectParameter p = (ObjectParameter) parameter; - numberField = new IntegerField(); if (p.getDefaultValue() != null) numberField.setValue(Integer.parseInt(p.getDefaultValue())); + else + numberField.setAllowBlank(false); + + HtmlLayoutContainer descr; + + if (p.getDescription() == null) { + descr = new HtmlLayoutContainer("

"); + descr.addStyleName("workflow-fieldDescription"); + + } else { + numberField.setToolTip(p.getDescription()); + descr = new HtmlLayoutContainer("

" + + p.getDescription() + "

"); + descr.addStyleName("workflow-fieldDescription"); + } + + SimpleContainer vContainer=new SimpleContainer(); + VerticalLayoutContainer vField = new VerticalLayoutContainer(); + HtmlLayoutContainer typeDescription = new HtmlLayoutContainer( + "Integer Value"); + typeDescription.setStylePrimaryName("workflow-parameters-description"); + vField.add(numberField, new VerticalLayoutData(1,-1,new Margins(0))); + vField.add(typeDescription, new VerticalLayoutData(-1,-1,new Margins(0))); + vContainer.add(vField); + + horiz.add(vContainer, new BoxLayoutData(new Margins())); + horiz.add(descr, new BoxLayoutData(new Margins())); + + fieldContainer.add(horiz); + fieldContainer.forceLayout(); - if (p.getDescription() != null) - numberField.setTitle(p.getDescription()); - numberField.setAllowBlank(false); - } /** @@ -53,7 +88,7 @@ public class IntFld extends AbstractFld { */ @Override public Widget getWidget() { - return numberField; + return fieldContainer; } } diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ListStringFld.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ListStringFld.java index 31a8d34..4e6b1bd 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ListStringFld.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/ListStringFld.java @@ -11,36 +11,134 @@ import org.gcube.portlets.user.dataminermanager.client.bean.parameters.ListParam import org.gcube.portlets.user.dataminermanager.client.bean.parameters.ObjectParameter; import org.gcube.portlets.user.dataminermanager.client.bean.parameters.Parameter; +import com.google.gwt.editor.client.Editor.Path; import com.google.gwt.user.client.ui.Widget; +import com.sencha.gxt.core.client.ValueProvider; +import com.sencha.gxt.core.client.util.Margins; +import com.sencha.gxt.data.shared.ModelKeyProvider; +import com.sencha.gxt.data.shared.PropertyAccess; import 
com.sencha.gxt.widget.core.client.button.TextButton; -import com.sencha.gxt.widget.core.client.container.HorizontalLayoutContainer; +import com.sencha.gxt.widget.core.client.container.BoxLayoutContainer.BoxLayoutData; +import com.sencha.gxt.widget.core.client.container.BoxLayoutContainer.BoxLayoutPack; +import com.sencha.gxt.widget.core.client.container.HBoxLayoutContainer; +import com.sencha.gxt.widget.core.client.container.HtmlLayoutContainer; +import com.sencha.gxt.widget.core.client.container.MarginData; import com.sencha.gxt.widget.core.client.container.SimpleContainer; import com.sencha.gxt.widget.core.client.container.VerticalLayoutContainer; +import com.sencha.gxt.widget.core.client.container.VerticalLayoutContainer.VerticalLayoutData; import com.sencha.gxt.widget.core.client.event.SelectEvent; +import com.sencha.gxt.widget.core.client.form.TextField; /** - * @author ceras + * + * @author Giancarlo Panichi + * email: g.panichi@isti.cnr.it * */ public class ListStringFld extends AbstractFld { - - private List items; - private SimpleContainer simpleContainer; + + private SimpleContainer fieldContainer; + private HBoxLayoutContainer horiz; + private SimpleContainer listContainer; private VerticalLayoutContainer vp; + private List items; private ListParameter listParameter; - /** + * * @param parameter */ public ListStringFld(Parameter parameter) { super(parameter); - this.listParameter = (ListParameter) parameter; - simpleContainer = new SimpleContainer(); + + listParameter = (ListParameter) parameter; + + listContainer = new SimpleContainer(); vp = new VerticalLayoutContainer(); - simpleContainer.add(vp); items = new ArrayList(); addField(null); + listContainer.add(vp, new MarginData(new Margins())); + + fieldContainer = new SimpleContainer(); + horiz = new HBoxLayoutContainer(); + horiz.setPack(BoxLayoutPack.START); + horiz.setEnableOverflow(false); + + HtmlLayoutContainer descr; + + if (listParameter.getDescription() == null) { + descr = new 
HtmlLayoutContainer("

"); + descr.addStyleName("workflow-fieldDescription"); + + } else { + listContainer.setToolTip(listParameter.getDescription()); + descr = new HtmlLayoutContainer("

" + + listParameter.getDescription() + "

"); + descr.addStyleName("workflow-fieldDescription"); + } + + horiz.add(listContainer, new BoxLayoutData(new Margins(0))); + horiz.add(descr, new BoxLayoutData(new Margins(0))); + + fieldContainer.add(horiz); + fieldContainer.forceLayout(); } + + public interface ItemStringProperties extends PropertyAccess { + + @Path("id") + ModelKeyProvider id(); + ValueProvider label(); + + + } + + public class ItemString{ + + + } + + /* + private void gridDef(){ + ColumnConfig nameCol = new ColumnConfig(props.name(), 50, "Company"); + ColumnConfig symbolCol = new ColumnConfig(props.symbol(), 75, "Symbol"); + ColumnConfig lastCol = new ColumnConfig(props.last(), 75, "Last"); + ColumnConfig changeCol = new ColumnConfig(props.change(), 75, "Change"); + ColumnConfig lastTransCol = new ColumnConfig(props.lastTrans(), 100, "Last Updated"); + + final NumberFormat number = NumberFormat.getFormat("0.00"); + changeCol.setCell(new AbstractCell() { + @Override + public void render(Context context, Double value, SafeHtmlBuilder sb) { + String style = "style='color: " + (value < 0 ? 
"red" : "green") + "'"; + String v = number.format(value); + sb.appendHtmlConstant("" + v + ""); + } + }); + + lastTransCol.setCell(new DateCell(DateTimeFormat.getFormat("MM/dd/yyyy"))); + + List> columns = new ArrayList>(); + columns.add(nameCol); + columns.add(symbolCol); + columns.add(lastCol); + columns.add(changeCol); + columns.add(lastTransCol); + + ColumnModel cm = new ColumnModel(columns); + + ListStore store = new ListStore(props.key()); + store.addAll(TestData.getStocks()); + + final Grid grid = new Grid(store, cm); + grid.setAllowTextSelection(true); + grid.getView().setAutoExpandColumn(nameCol); + grid.getView().setStripeRows(true); + grid.getView().setColumnLines(true); + grid.setBorders(false); + grid.setColumnReordering(true); + + } + */ private void addField(Item upperItem) { @@ -50,7 +148,7 @@ public class ListStringFld extends AbstractFld { if (upperItem == null) { Item item = new Item(objPar, true); items.add(item); - vp.add(item); + vp.add(item, new VerticalLayoutData(1,-1, new Margins())); } else { // search the position of the upper item int pos = 0; @@ -65,8 +163,7 @@ public class ListStringFld extends AbstractFld { items.add(pos + 1, item); vp.insert(item, pos + 1); } - - simpleContainer.forceLayout(); + } /** @@ -80,8 +177,8 @@ public class ListStringFld extends AbstractFld { if (items.size() == 1) { items.get(0).hideCancelButton(); } - - simpleContainer.forceLayout(); + + } @@ -108,7 +205,7 @@ public class ListStringFld extends AbstractFld { */ @Override public Widget getWidget() { - return simpleContainer; + return fieldContainer; } /** @@ -118,74 +215,88 @@ public class ListStringFld extends AbstractFld { public boolean isValid() { boolean valid = false; for (Item item : items) - if (item.getField().getValue() != null) { + if (item.isValid()) { valid = true; break; } return valid; } - private class Item extends HorizontalLayoutContainer { + private class Item extends HBoxLayoutContainer { - private StringFld field; - private TextButton 
addButton; - private TextButton removeButton; + private TextField field; + private TextButton addBtn; + private TextButton removeBtn; /** * @param objPar */ public Item(ObjectParameter objectParameter, boolean first) { super(); - this.field = new StringFld(objectParameter); - this.add(field.getWidget()); + + field = new TextField(); + field.setAllowBlank(false); + + addBtn = new TextButton(""); - addButton = new TextButton(""); + addBtn.setIcon(DataMinerManager.resources.add()); - addButton.setIcon(DataMinerManager.resources.add()); - - addButton.addSelectHandler(new SelectEvent.SelectHandler() { + addBtn.addSelectHandler(new SelectEvent.SelectHandler() { @Override public void onSelect(SelectEvent event) { addField(Item.this); + forceLayout(); + vp.forceLayout(); + fieldContainer.forceLayout(); } }); - removeButton = new TextButton(""); + removeBtn = new TextButton(""); - removeButton.setIcon(DataMinerManager.resources.cancel()); + removeBtn.setIcon(DataMinerManager.resources.cancel()); - removeButton.addSelectHandler(new SelectEvent.SelectHandler() { + removeBtn.addSelectHandler(new SelectEvent.SelectHandler() { @Override public void onSelect(SelectEvent event) { removeField(Item.this); + forceLayout(); + vp.forceLayout(); + fieldContainer.forceLayout(); } }); - removeButton.setVisible(!first); - - this.add(addButton); - this.add(removeButton); + removeBtn.setVisible(!first); + + setPack(BoxLayoutPack.START); + setEnableOverflow(false); + add(field, new BoxLayoutData(new Margins())); + add(addBtn,new BoxLayoutData(new Margins())); + add(removeBtn,new BoxLayoutData(new Margins())); + + forceLayout(); } public void showCancelButton() { - removeButton.setVisible(true); + removeBtn.setVisible(true); } public void hideCancelButton() { - removeButton.setVisible(false); + removeBtn.setVisible(false); } public String getValue() { - return field.getValue(); + return field.getCurrentValue(); + } + + public boolean isValid(){ + return field.isValid(); } - public StringFld 
getField() { - return field; - } + } } diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/StringFld.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/StringFld.java index 71aecf1..c1dec9f 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/StringFld.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/StringFld.java @@ -8,13 +8,11 @@ import org.gcube.portlets.user.dataminermanager.client.bean.parameters.Parameter import com.google.gwt.user.client.ui.Widget; import com.sencha.gxt.core.client.util.Margins; -import com.sencha.gxt.core.client.util.Padding; import com.sencha.gxt.widget.core.client.container.BoxLayoutContainer.BoxLayoutData; import com.sencha.gxt.widget.core.client.container.BoxLayoutContainer.BoxLayoutPack; import com.sencha.gxt.widget.core.client.container.HBoxLayoutContainer; -import com.sencha.gxt.widget.core.client.container.HorizontalLayoutContainer; -import com.sencha.gxt.widget.core.client.container.HorizontalLayoutContainer.HorizontalLayoutData; import com.sencha.gxt.widget.core.client.container.HtmlLayoutContainer; +import com.sencha.gxt.widget.core.client.container.MarginData; import com.sencha.gxt.widget.core.client.container.SimpleContainer; import com.sencha.gxt.widget.core.client.form.TextField; @@ -26,8 +24,7 @@ import com.sencha.gxt.widget.core.client.form.TextField; */ public class StringFld extends AbstractFld { - private SimpleContainer simpleContainer; - private HorizontalLayoutContainer horiz; + private SimpleContainer fieldContainer; private TextField textField; /** @@ -35,11 +32,7 @@ public class StringFld extends AbstractFld { */ public StringFld(Parameter parameter) { super(parameter); - simpleContainer=new SimpleContainer(); - HBoxLayoutContainer horiz = new HBoxLayoutContainer(); - horiz.setPack(BoxLayoutPack.START); - horiz.setEnableOverflow(false); - + ObjectParameter p = 
(ObjectParameter) parameter; textField = new TextField(); @@ -60,12 +53,16 @@ public class StringFld extends AbstractFld { descr.addStyleName("workflow-fieldDescription"); } + fieldContainer=new SimpleContainer(); + HBoxLayoutContainer horiz = new HBoxLayoutContainer(); + horiz.setPack(BoxLayoutPack.START); + horiz.setEnableOverflow(false); + horiz.add(textField,new BoxLayoutData(new Margins())); horiz.add(descr,new BoxLayoutData(new Margins())); - - /*horiz.add(textField, new HorizontalLayoutData(-1,-1,new Margins())); - horiz.add(descr,new HorizontalLayoutData(1,-1,new Margins()));*/ - simpleContainer.add(horiz); + fieldContainer.add(horiz,new MarginData(new Margins())); + + fieldContainer.forceLayout(); } /** @@ -81,7 +78,7 @@ public class StringFld extends AbstractFld { */ @Override public Widget getWidget() { - return simpleContainer; + return fieldContainer; } } diff --git a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/TabularFld.java b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/TabularFld.java index 81fb5e5..a20b79f 100644 --- a/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/TabularFld.java +++ b/src/main/java/org/gcube/portlets/user/dataminermanager/client/parametersfield/TabularFld.java @@ -10,6 +10,9 @@ import java.util.List; import org.gcube.portlets.user.dataminermanager.client.DataMinerManager; import org.gcube.portlets.user.dataminermanager.client.bean.parameters.Parameter; import org.gcube.portlets.user.dataminermanager.client.bean.parameters.TabularParameter; +import org.gcube.portlets.user.dataminermanager.client.events.TabularFldChangeEvent; +import org.gcube.portlets.user.dataminermanager.client.events.TabularFldChangeEvent.HasTabularFldChangeEventHandler; +import org.gcube.portlets.user.dataminermanager.client.events.TabularFldChangeEvent.TabularFldChangeEventHandler; import 
org.gcube.portlets.user.dataminermanager.client.rpc.DataMinerPortletServiceAsync; import org.gcube.portlets.user.dataminermanager.client.util.UtilsGXT3; import org.gcube.portlets.user.dataminermanager.shared.data.TableItemSimple; @@ -20,17 +23,23 @@ import org.gcube.portlets.widgets.wsexplorer.shared.Item; import org.gcube.portlets.widgets.wsexplorer.shared.ItemType; import com.allen_sauer.gwt.log.client.Log; +import com.google.gwt.event.shared.GwtEvent; +import com.google.gwt.event.shared.HandlerRegistration; import com.google.gwt.user.client.rpc.AsyncCallback; -import com.google.gwt.user.client.ui.HTML; import com.google.gwt.user.client.ui.Widget; import com.sencha.gxt.core.client.dom.XDOM; import com.sencha.gxt.core.client.util.Format; +import com.sencha.gxt.core.client.util.Margins; import com.sencha.gxt.widget.core.client.button.TextButton; -import com.sencha.gxt.widget.core.client.container.HorizontalLayoutContainer; +import com.sencha.gxt.widget.core.client.container.BoxLayoutContainer.BoxLayoutData; +import com.sencha.gxt.widget.core.client.container.BoxLayoutContainer.BoxLayoutPack; +import com.sencha.gxt.widget.core.client.container.HBoxLayoutContainer; +import com.sencha.gxt.widget.core.client.container.HtmlLayoutContainer; import com.sencha.gxt.widget.core.client.container.MarginData; import com.sencha.gxt.widget.core.client.container.SimpleContainer; import com.sencha.gxt.widget.core.client.container.VerticalLayoutContainer; import com.sencha.gxt.widget.core.client.event.SelectEvent; +import com.sencha.gxt.widget.core.client.form.TextField; /** * @@ -38,17 +47,17 @@ import com.sencha.gxt.widget.core.client.event.SelectEvent; * email: g.panichi@isti.cnr.it * */ -public class TabularFld extends AbstractFld { +public class TabularFld extends AbstractFld implements HasTabularFldChangeEventHandler { - private SimpleContainer sc; + private SimpleContainer fieldContainer; + private HBoxLayoutContainer horiz; private VerticalLayoutContainer vp; - // 
TableSelector tableSelector; private WorkspaceExplorerSelectDialog wselectDialog; private TextButton selectButton, selectButton2, cancelButton; - private HTML templatesList; + private HtmlLayoutContainer templatesList; private TableItemSimple selectedTableItem = null; - private List listeners = new ArrayList(); + //private List listeners = new ArrayList(); /** * @param parameter @@ -56,11 +65,36 @@ public class TabularFld extends AbstractFld { public TabularFld(Parameter parameter) { super(parameter); Log.debug("TabularField"); + + SimpleContainer tabContainer=new SimpleContainer(); vp=new VerticalLayoutContainer(); init(); - sc=new SimpleContainer(); - sc.add(vp, new MarginData(0)); + tabContainer.add(vp, new MarginData(new Margins(0))); + + fieldContainer = new SimpleContainer(); + horiz = new HBoxLayoutContainer(); + horiz.setPack(BoxLayoutPack.START); + horiz.setEnableOverflow(false); + + HtmlLayoutContainer descr; + + if (parameter.getDescription() == null) { + descr = new HtmlLayoutContainer("

"); + descr.addStyleName("workflow-fieldDescription"); + + } else { + tabContainer.setToolTip(parameter.getDescription()); + descr = new HtmlLayoutContainer("

" + + parameter.getDescription() + "

"); + descr.addStyleName("workflow-fieldDescription"); + } + + horiz.add(tabContainer, new BoxLayoutData(new Margins())); + horiz.add(descr, new BoxLayoutData(new Margins())); + + fieldContainer.add(horiz); showNoSelectionField(); + } @@ -68,17 +102,6 @@ public class TabularFld extends AbstractFld { TabularParameter p = (TabularParameter) parameter; List templates = p.getTemplates(); - /* - * tableSelector = new TableSelector(templates) { - * - * @Override public void fireSelection(TableItemSimple tableItem) { - * super.fireSelection(tableItem); selectedTableItem = tableItem; - * showFieldWithSelection(); - * - * loadTableMetadata(tableItem); // send change message to all listeners - * // it will be managed by all columnFields and columnListField that - * depends by tabular field } }; - */ List selectableTypes = new ArrayList(); selectableTypes.add(ItemType.EXTERNAL_FILE); List showableTypes = new ArrayList(); @@ -183,11 +206,11 @@ public class TabularFld extends AbstractFld { String list = ""; boolean first = true; for (String template : templates) { - list += (first ? "" : ", ") + template; + list += (first ? "" : ", ") + Format.ellipse(template,50); first = false; } - templatesList = new HTML("Suitable Data Set Templates:
" + list); - templatesList.addStyleName("workflow-templatesList"); + templatesList = new HtmlLayoutContainer("

Suitable Data Set Templates:
" + list+"

"); + templatesList.addStyleName("workflow-parameters-description"); } @@ -200,7 +223,6 @@ public class TabularFld extends AbstractFld { Log.error("Error in retrieveTableInformation " + caught.getMessage()); if (caught instanceof ExpiredSessionServiceException) { UtilsGXT3.alert("Error", "Expired Session"); - //sessionExpiredShowDelayed(); } else { UtilsGXT3.alert( @@ -216,54 +238,14 @@ public class TabularFld extends AbstractFld { Log.debug("Retrieved: "+result); selectedTableItem=result; showFieldWithSelection(); - //loadTableMetadata(selectedTableItem); updateListeners(selectedTableItem); } }); } - /** - * @param tableItem - - protected void loadTableMetadata(final TableItemSimple tableItem) { - //TabularData tabularData = DataMinerManager.getTabularData(); - String tableId = tableItem.getId(); - - tabularData.getTableDefinition(tableId, - new AsyncCallback() { - - @Override - public void onFailure(Throwable caught) { - vp.unmask(); - Info.display("ERROR", ""); - } - - @Override - public void onSuccess(TableDefinition tableDefinition) { - vp.unmask(); - List columns = tableDefinition - .getColumnsAsList(); - Collections.sort(columns, new ColumnPositionComparator( - false)); - for (ColumnDefinition column : columns) - tableItem.addColumnName(column.getLabel()); - updateListeners(tableItem); - } - - }); - - vp.mask("Load Data Set Metadata...", Constants.maskLoadingStyle); - }*/ - - /** - * @param id - */ - protected void updateListeners(TableItemSimple tableItem) { - for (AbstractFld abstractField : listeners) { - abstractField.fireEvent(tableItem); - } - } + + /** * */ @@ -271,23 +253,26 @@ public class TabularFld extends AbstractFld { vp.clear(); vp.add(selectButton); vp.add(templatesList); - sc.forceLayout(); + fieldContainer.forceLayout(); } /** * */ private void showFieldWithSelection() { - //final String tableId = selectedTableItem.getId(); - final String tableName = selectedTableItem.getName(); - - vp.clear(); - HorizontalLayoutContainer hp = new 
HorizontalLayoutContainer(); - hp.add(new HTML("
" - + Format.ellipse(tableName, 30) + "
")); - hp.add(selectButton2); - hp.add(cancelButton); + String tableName = selectedTableItem.getName(); + if(tableName==null|| tableName.isEmpty()){ + tableName="NoName"; + } + + TextField tableDescription=new TextField(); + tableDescription.setValue(tableName); + tableDescription.setReadOnly(true); + + /*HTML tableDescription=new HTML("
" + + Format.ellipse(tableName, 30) + "
");*/ + TextButton openTableButton = new TextButton("Open Data Set"); openTableButton.addSelectHandler(new SelectEvent.SelectHandler() { @@ -319,10 +304,16 @@ public class TabularFld extends AbstractFld { } }); - hp.add(openTableButton); - vp.add(hp); + + HBoxLayoutContainer h=new HBoxLayoutContainer(); + h.add(tableDescription,new BoxLayoutData(new Margins())); + h.add(selectButton2,new BoxLayoutData(new Margins())); + h.add(cancelButton,new BoxLayoutData(new Margins())); + vp.clear(); + vp.add(h); vp.add(templatesList); - sc.forceLayout(); + vp.forceLayout(); + fieldContainer.forceLayout(); } /** @@ -338,7 +329,7 @@ public class TabularFld extends AbstractFld { */ @Override public Widget getWidget() { - return sc; + return fieldContainer; } /** @@ -349,7 +340,25 @@ public class TabularFld extends AbstractFld { return (selectedTableItem != null); } - public void addChangeListener(AbstractFld abstractField) { - this.listeners.add(abstractField); + + + @Override + public HandlerRegistration addTabularFldChangeEventHandler( + TabularFldChangeEventHandler handler) { + return fieldContainer.addHandler(handler, TabularFldChangeEvent.getType()); + + } + + + + private void updateListeners(TableItemSimple tableItemSimple) { + TabularFldChangeEvent event=new TabularFldChangeEvent(tableItemSimple); + fireEvent(event); + } + + + @Override + public void fireEvent(GwtEvent event) { + fieldContainer.fireEvent(event); } } diff --git a/src/main/webapp/DataMinerManager.css b/src/main/webapp/DataMinerManager.css index 68c71bc..1724219 100644 --- a/src/main/webapp/DataMinerManager.css +++ b/src/main/webapp/DataMinerManager.css @@ -296,9 +296,9 @@ .workflow-parameters-preload { display: block; - margin: auto; - margin-left: auto; - margin-right: auto; + background: url('ajax-loader-big.gif') no-repeat; + width: 400px; + height: 180px; } /* TOOLTIP */ diff --git a/test.log.1 b/test.log.1 new file mode 100644 index 0000000..f3f5961 --- /dev/null +++ b/test.log.1 @@ -0,0 +1,101397 @@ 
+2016-04-06 18:34:04 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-06 18:34:04 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-06 18:35:00 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 18:35:00 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-06 18:35:00 WARN SessionCheckerServiceImpl:68 - Stopping session polling as i think you are in development mode +2016-04-06 18:37:11 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-06 18:37:11 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-06 18:38:07 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 18:38:07 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 18:38:07 WARN SessionCheckerServiceImpl:68 - Stopping session polling as i think you are in development mode +2016-04-06 18:53:16 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-06 18:53:16 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-06 18:54:12 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 18:54:12 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-06 18:54:12 WARN SessionCheckerServiceImpl:68 - Stopping session polling as i think you are in development mode +2016-04-06 19:00:45 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-06 19:00:45 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-06 19:00:45 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-06 19:00:45 INFO SessionUtil:64 - no user found in session, use test user +2016-04-06 19:00:45 ERROR DataMinerManagerServiceImpl:120 - An error occurred getting the OperatorsClassifications list +org.gcube.portlets.user.dataminermanager.shared.exception.ExpiredSessionServiceException: Session Expired! + at org.gcube.portlets.user.dataminermanager.server.util.SessionUtil.getASLSession(SessionUtil.java:65) + at org.gcube.portlets.user.dataminermanager.server.DataMinerManagerServiceImpl.getOperatorsClassifications(DataMinerManagerServiceImpl.java:115) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:606) + at com.google.gwt.user.server.rpc.RPC.invokeAndEncodeResponse(RPC.java:561) + at com.google.gwt.user.server.rpc.RemoteServiceServlet.processCall(RemoteServiceServlet.java:265) + at com.google.gwt.user.server.rpc.RemoteServiceServlet.processPost(RemoteServiceServlet.java:305) + at com.google.gwt.user.server.rpc.AbstractRemoteServiceServlet.doPost(AbstractRemoteServiceServlet.java:62) + at javax.servlet.http.HttpServlet.service(HttpServlet.java:755) + at javax.servlet.http.HttpServlet.service(HttpServlet.java:848) + at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:686) + at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:501) + at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:137) + at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:557) + at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:231) + at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086) + at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428) + at 
org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:193) + at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020) + at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135) + at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) + at org.eclipse.jetty.server.handler.RequestLogHandler.handle(RequestLogHandler.java:68) + at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) + at org.eclipse.jetty.server.Server.handle(Server.java:370) + at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:489) + at org.eclipse.jetty.server.AbstractHttpConnection.content(AbstractHttpConnection.java:960) + at org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.content(AbstractHttpConnection.java:1021) + at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:865) + at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:240) + at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82) + at org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:668) + at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52) + at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608) + at org.eclipse.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543) + at java.lang.Thread.run(Thread.java:745) +2016-04-06 19:00:45 INFO SessionUtil:64 - no user found in session, use test user +2016-04-06 19:02:07 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-06 19:02:07 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-06 19:02:07 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-06 19:02:07 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 19:02:07 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 19:02:07 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 19:02:07 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 19:02:07 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@7a9ec15d +2016-04-06 19:02:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:02:07 INFO ASLSession:352 - Logging the entrance +2016-04-06 19:02:07 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 19:02:07 DEBUG TemplateModel:83 - 2016-04-06 19:02:07, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 19:02:07 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:02:07 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 19:02:07 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 120 ms +2016-04-06 19:02:07 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-06 19:02:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-06 19:02:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-06 19:02:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-06 19:02:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-06 19:02:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-06 19:02:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-06 19:02:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-06 19:02:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-06 19:02:07 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-06 19:02:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-06 19:02:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-06 19:02:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-06 19:02:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-06 19:02:07 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-06 19:02:08 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:02:08 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 19:02:08 INFO SessionUtil:49 
- no user found in session, use test user +2016-04-06 19:02:08 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 19:02:08 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-06 19:02:08 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 19:02:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 19:02:08 INFO ASLSession:352 - Logging the entrance +2016-04-06 19:02:08 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 19:02:08 DEBUG TemplateModel:83 - 2016-04-06 19:02:08, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 19:02:08 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:02:08 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-06 19:02:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@4d0b6d49 +2016-04-06 19:02:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@1d7961e4 +2016-04-06 19:02:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@7dfe22a8 +2016-04-06 19:02:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@4cee083c +2016-04-06 19:02:08 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where 
($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 311 ms +2016-04-06 19:02:08 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-06 19:02:08 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-06 19:02:08 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-06 19:02:08 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-06 19:02:08 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-06 19:02:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:02:08 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 19:02:08 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-06 19:02:08 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 19:02:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-06 19:02:08 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-06 19:02:08 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:02:08 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-06 19:02:08 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:02:08 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-06 19:02:08 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:02:08 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:02:08 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 28 ms +2016-04-06 19:02:08 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-06 19:02:08 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:02:08 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-06 19:02:08 INFO StatWPSClientSession:84 - CONNECT +2016-04-06 19:02:09 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-06 19:02:09 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-06 19:02:09 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-06 19:02:09 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-06 19:02:09 DEBUG JCRRepository:271 - Initialize repository +2016-04-06 19:02:09 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-06 19:02:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-06 19:02:09 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:02:09 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-06 19:02:09 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 217 ms +2016-04-06 19:02:09 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-06 19:02:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-06 19:02:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:02:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:02:09 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. 
+2016-04-06 19:02:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:02:09 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-06 19:02:09 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 19:02:10 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 19:02:10 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-06 19:02:10 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 19:02:10 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 19:02:10 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-06 19:02:10 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-06 19:02:10 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:02:10 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:02:10 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:02:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:02:10 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-06 19:02:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-06 19:02:10 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:02:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:02:10 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:02:10 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:02:10 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:02:10 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:02:10 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:02:10 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:02:10 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:02:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-06 19:02:10 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:02:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:02:10 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:02:10 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:02:10 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:02:10 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:02:10 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:02:10 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:02:10 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:02:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-06 19:02:10 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:02:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:02:10 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:02:10 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:02:10 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:02:10 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:02:10 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:02:10 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:02:10 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:02:10 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. 
Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-06 19:02:10 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:02:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-06 19:02:10 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:02:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:02:11 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 19:02:11 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 19:02:11 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 19:02:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 19:02:11 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-06 19:02:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 19:02:11 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:02:11 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:02:11 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:02:11 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:02:11 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:02:11 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:02:11 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-06 19:02:11 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:02:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:02:11 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-06 19:02:11 INFO WorkspaceExplorerServiceImpl:188 - end time - 169 msc 0 sec +2016-04-06 19:02:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-06 19:02:52 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-06 19:02:52 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:02:52 INFO SClient4WPS:643 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS +2016-04-06 19:02:52 DEBUG SClient4WPS:276 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:02:52 INFO StatWPSClientSession:84 - CONNECT +2016-04-06 19:02:53 DEBUG SClient4WPS:297 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS + XMEANS + A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed. 
+ + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + + + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + + + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-06 19:02:53 DEBUG SClient4WPS:301 - WPSClient->Fetching Inputs +2016-04-06 19:02:53 DEBUG SClient4WPS:303 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-06 19:02:53 DEBUG SClient4WPS:303 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-06 19:02:53 DEBUG SClient4WPS:303 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-06 19:02:53 DEBUG SClient4WPS:303 - WPSClient->Input: + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. 
XMeans max number of overall iterations of the clustering learning + + + + 10 + + +2016-04-06 19:02:53 DEBUG SClient4WPS:303 - WPSClient->Input: + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + +2016-04-06 19:02:53 DEBUG SClient4WPS:303 - WPSClient->Input: + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + +2016-04-06 19:02:53 DEBUG SClient4WPS:303 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-06 19:02:53 DEBUG SClient4WPS:308 - WPSClient->Fetching Outputs +2016-04-06 19:02:53 DEBUG SClient4WPS:310 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-06 19:02:53 DEBUG SClient4WPS:310 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-06 19:02:53 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:02:53 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-06 19:02:53 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-06 19:02:53 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-06 19:02:53 DEBUG WPS2SM:201 - Schema: null +2016-04-06 19:02:53 DEBUG WPS2SM:202 - Encoding: null +2016-04-06 19:02:53 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-06 19:02:53 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-06 19:02:53 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-06 19:02:53 DEBUG SClient4WPS:658 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-06 19:02:53 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-06 19:02:53 DEBUG WPS2SM:93 - WPS type: +2016-04-06 19:02:53 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-06 19:02:53 DEBUG WPS2SM:101 - Guessed default value: +2016-04-06 19:02:53 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-06 19:02:53 DEBUG WPS2SM:112 - Machter find: true +2016-04-06 19:02:53 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-06 19:02:53 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-06 19:02:53 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-06 19:02:53 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-06 19:02:53 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-06 19:02:53 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-06 19:02:53 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-06 19:02:53 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-06 19:02:53 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-06 19:02:53 DEBUG SClient4WPS:658 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-06 19:02:53 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-06 19:02:53 DEBUG WPS2SM:93 - WPS type: +2016-04-06 19:02:53 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-06 19:02:53 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-06 19:02:53 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-06 19:02:53 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-06 19:02:53 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-06 19:02:53 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-06 19:02:53 DEBUG WPS2SM:254 - Conversion to SM Type->maxIterations is a Literal Input +2016-04-06 19:02:53 DEBUG WPS2SM:93 - WPS type: +2016-04-06 19:02:53 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-06 19:02:53 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-06 19:02:53 DEBUG WPS2SM:290 - Conversion to SM Type->Title:XMeans max number of overall iterations of the clustering learning +2016-04-06 19:02:53 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxIterations +2016-04-06 19:02:53 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-06 19:02:53 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-06 19:02:53 DEBUG WPS2SM:254 - Conversion to SM Type->minClusters is a Literal Input +2016-04-06 19:02:53 DEBUG WPS2SM:93 - WPS type: +2016-04-06 19:02:53 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-06 19:02:53 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-06 19:02:53 DEBUG WPS2SM:290 - Conversion to SM Type->Title:minimum number of expected clusters +2016-04-06 19:02:53 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minClusters +2016-04-06 19:02:53 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-06 19:02:53 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-06 19:02:53 DEBUG WPS2SM:254 - Conversion to SM Type->maxClusters is a Literal Input +2016-04-06 19:02:53 DEBUG WPS2SM:93 - WPS type: +2016-04-06 19:02:53 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-06 19:02:53 DEBUG WPS2SM:101 - Guessed default value: 50 +2016-04-06 19:02:53 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of clusters to produce +2016-04-06 19:02:53 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxClusters +2016-04-06 19:02:53 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-06 19:02:53 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. 
of Entries:1; default:50], typology=OBJECT] +2016-04-06 19:02:53 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-06 19:02:53 DEBUG WPS2SM:93 - WPS type: +2016-04-06 19:02:53 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-06 19:02:53 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-06 19:02:53 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-06 19:02:53 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-06 19:02:53 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-06 19:02:53 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-06 19:02:53 DEBUG SClient4WPS:662 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. of Entries:1; default:50], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-06 19:02:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 19:02:54 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-06 19:02:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 19:02:54 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:02:54 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:02:54 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:02:54 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:02:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 19:02:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:02:54 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:02:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:02:54 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-06 19:02:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 19:02:54 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:02:54 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:02:54 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:02:54 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:02:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:02:54 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:02:54 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-06 19:02:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-06 19:02:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:02:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 19:02:54 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-06 19:02:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:02:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:02:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 19:02:54 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-06 19:02:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 19:02:54 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:02:54 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:02:54 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:02:54 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:02:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:02:54 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:02:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:02:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:02:54 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 19:02:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:02:54 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:02:54 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:02:54 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:02:54 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:02:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:02:54 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:02:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:02:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:02:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:02:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:02:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:02:54 DEBUG DefaultScopeProvider:38 - setting scope 
/gcube/devsec/devVRE in thread 33 +2016-04-06 19:02:54 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 19:02:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:02:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:02:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:02:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:02:54 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-06 19:02:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-06 19:02:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 19:02:54 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:02:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-06 19:02:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 29 ms +2016-04-06 19:02:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-06 19:02:54 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:02:54 INFO 
ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-06 19:02:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 18 ms +2016-04-06 19:02:54 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-06 19:02:54 INFO ISClientConnector:82 - found only one RR, take it +2016-04-06 19:02:54 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-06 19:02:54 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-06 19:02:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-06 19:02:54 DEBUG StorageClient:517 - set scope: /gcube +2016-04-06 19:02:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-06 19:02:54 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:02:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-06 19:02:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-06 19:02:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-06 19:02:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-06 19:02:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-06 19:02:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 19:02:54 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-06 19:02:54 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-06 19:02:54 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-06 19:02:54 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-06 19:02:54 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-06 19:02:54 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-06 19:02:54 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-06 19:02:54 INFO WorkspaceExplorerServiceImpl:142 - end time - 409 msc 0 sec +2016-04-06 19:02:54 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-06 19:03:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-06 19:03:03 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-06 19:03:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 19:03:58 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-06 19:04:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 19:04:53 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-06 19:05:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 19:05:48 DEBUG 
ASLSession:458 - Getting security token: null in thread 34 +2016-04-06 19:06:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-06 19:06:43 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-06 19:08:05 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-06 19:08:05 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-06 19:08:05 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-06 19:08:05 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 19:08:05 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 19:08:05 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 19:08:05 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 19:08:05 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@3cc9257c +2016-04-06 19:08:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:08:05 INFO ASLSession:352 - Logging the entrance +2016-04-06 19:08:05 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-06 19:08:05 DEBUG TemplateModel:83 - 2016-04-06 19:08:05, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 19:08:05 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:08:05 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 19:08:05 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 116 ms +2016-04-06 19:08:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-06 19:08:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-06 19:08:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-06 19:08:05 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-06 19:08:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-06 19:08:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-06 19:08:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-06 19:08:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-06 19:08:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-06 19:08:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-06 19:08:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-06 19:08:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-06 19:08:05 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-06 19:08:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-06 19:08:05 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-06 19:08:05 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:08:05 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 19:08:05 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@6c2c6668 +2016-04-06 19:08:05 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@7ce56a45 +2016-04-06 19:08:05 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@3c8c486 +2016-04-06 19:08:05 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@933b857 +2016-04-06 19:08:05 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 128 ms +2016-04-06 19:08:06 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-06 19:08:06 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-06 19:08:06 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-06 19:08:06 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-06 19:08:06 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-06 19:08:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:08:06 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:08:06 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-06 19:08:06 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 30 ms +2016-04-06 19:08:06 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-06 19:08:06 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:08:06 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-06 19:08:06 INFO StatWPSClientSession:84 - CONNECT +2016-04-06 19:08:06 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 19:08:06 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 19:08:06 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-06 19:08:06 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 19:08:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-06 19:08:06 INFO ASLSession:352 - Logging the entrance +2016-04-06 19:08:06 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 19:08:06 DEBUG TemplateModel:83 - 2016-04-06 19:08:06, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 19:08:06 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:08:06 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-06 19:08:06 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 19:08:06 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-06 19:08:06 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 19:08:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 19:08:06 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-06 19:08:06 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:08:06 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-06 19:08:06 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:08:06 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:08:06 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-06 19:08:06 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-06 19:08:06 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-06 19:08:06 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-06 19:08:06 DEBUG JCRRepository:271 - Initialize repository +2016-04-06 19:08:06 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-06 19:08:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-06 19:08:06 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:08:06 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-06 19:08:06 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-06 19:08:06 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-06 19:08:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 19:08:06 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:08:06 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:08:06 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-06 19:08:06 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:08:06 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-06 19:08:07 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:08:07 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-06 19:08:07 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 19:08:07 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 19:08:07 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-06 19:08:07 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 19:08:07 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 19:08:07 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-06 19:08:07 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-06 19:08:07 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:08:07 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:08:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:08:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:08:07 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-06 19:08:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-06 19:08:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:08:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:08:07 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:08:07 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:08:07 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:08:07 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:08:07 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:08:07 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:08:07 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:08:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-06 19:08:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:08:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:08:07 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:08:07 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:08:07 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:08:07 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:08:07 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:08:07 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:08:07 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:08:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-06 19:08:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:08:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:08:07 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:08:07 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:08:07 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:08:07 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:08:07 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:08:07 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:08:07 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:08:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-06 19:08:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:08:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:08:08 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 19:08:08 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 19:08:08 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 19:08:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 19:08:08 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-06 19:08:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 19:08:08 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:08:08 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:08:08 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:08:08 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:08:08 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:08:08 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:08:08 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-06 19:08:08 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:08:08 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:08:08 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-06 19:08:08 INFO WorkspaceExplorerServiceImpl:188 - end time - 102 msc 0 sec +2016-04-06 19:09:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:09:01 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 19:09:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 19:09:56 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-06 19:14:06 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-06 19:14:06 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-06 19:14:06 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-06 19:14:06 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 19:14:06 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 19:14:06 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-06 19:14:06 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 19:14:06 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@61cfe9ef +2016-04-06 19:14:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-06 19:14:06 INFO ASLSession:352 - Logging the entrance +2016-04-06 19:14:06 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 19:14:06 DEBUG TemplateModel:83 - 2016-04-06 19:14:06, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 19:14:06 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:14:06 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 19:14:06 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 108 ms +2016-04-06 19:14:06 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-06 19:14:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-06 19:14:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-06 19:14:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-06 19:14:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-06 19:14:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-06 19:14:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-06 19:14:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-06 19:14:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-06 19:14:06 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-06 19:14:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-06 19:14:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-06 19:14:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-06 19:14:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-06 19:14:06 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-06 19:14:06 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:14:06 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 19:14:06 INFO 
HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@518d70cc +2016-04-06 19:14:06 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@20381c30 +2016-04-06 19:14:06 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@e2ead9e +2016-04-06 19:14:06 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@7470ef19 +2016-04-06 19:14:06 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 103 ms +2016-04-06 19:14:06 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-06 19:14:07 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 19:14:07 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 19:14:07 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 19:14:07 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 19:14:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:14:07 INFO ASLSession:352 - Logging the entrance +2016-04-06 19:14:07 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 19:14:07 DEBUG TemplateModel:83 - 2016-04-06 19:14:07, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 19:14:07 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:14:07 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-06 19:14:07 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 19:14:07 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 19:14:07 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 19:14:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 19:14:07 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-06 19:14:07 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:14:07 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-06 19:14:07 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:14:07 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:14:07 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-06 19:14:07 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-06 19:14:07 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-06 19:14:07 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-06 19:14:07 DEBUG JCRRepository:271 - Initialize repository +2016-04-06 19:14:07 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-06 19:14:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-06 19:14:07 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:14:07 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-06 19:14:08 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 202 ms +2016-04-06 19:14:08 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-06 19:14:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 19:14:08 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:14:08 INFO JCRHomeManager:48 - getUser 
portalLogin: giancarlo.panichi +2016-04-06 19:14:08 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-06 19:14:08 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:14:08 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-06 19:14:08 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-06 19:14:08 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-06 19:14:08 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-06 19:14:08 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-06 19:14:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-06 19:14:08 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:14:08 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-06 19:14:08 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 31 ms +2016-04-06 19:14:08 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-06 19:14:08 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:14:08 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-06 19:14:08 INFO StatWPSClientSession:84 - CONNECT +2016-04-06 19:14:13 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:14:13 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. 
a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. 
If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-06 19:14:14 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 19:14:14 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 19:14:14 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-06 19:14:14 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 19:14:14 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 19:14:14 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-06 19:14:14 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-06 19:14:14 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:14:14 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:14:14 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:14:14 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:14:14 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-06 19:14:14 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-06 19:14:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:14:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:14:15 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:14:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:14:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:14:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:14:15 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:14:15 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:14:15 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:14:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-06 19:14:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:14:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:14:15 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:14:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:14:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:14:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:14:15 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:14:15 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:14:15 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:14:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-06 19:14:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:14:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:14:15 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:14:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:14:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:14:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:14:15 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:14:15 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:14:15 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:14:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-06 19:14:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:14:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:14:15 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 19:14:15 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 19:14:15 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 19:14:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-06 19:14:15 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-06 19:14:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-06 19:14:15 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:14:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:14:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:14:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:14:15 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:14:15 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:14:15 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-06 19:14:16 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:14:16 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:14:16 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-06 19:14:16 INFO WorkspaceExplorerServiceImpl:188 - end time - 103 msc 0 sec +2016-04-06 19:15:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 19:15:02 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-06 19:15:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 19:15:57 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-06 19:16:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:16:52 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 19:17:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:17:47 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 19:18:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:18:42 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 19:19:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE 
in thread 32 +2016-04-06 19:19:37 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-06 19:20:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 19:20:32 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-06 19:21:39 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-06 19:21:39 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-06 19:21:39 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-06 19:21:39 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 19:21:39 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 19:21:39 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-06 19:21:39 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 19:21:39 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@4d8c88 +2016-04-06 19:21:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 19:21:39 INFO ASLSession:352 - Logging the entrance +2016-04-06 19:21:39 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-06 19:21:39 DEBUG TemplateModel:83 - 2016-04-06 19:21:39, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 19:21:39 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:21:39 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 19:21:39 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 109 ms +2016-04-06 19:21:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-06 19:21:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-06 19:21:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-06 19:21:39 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-06 19:21:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-06 19:21:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-06 19:21:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-06 19:21:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-06 19:21:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-06 19:21:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-06 19:21:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-06 19:21:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-06 19:21:39 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-06 19:21:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-06 19:21:39 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-06 19:21:39 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:21:39 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 19:21:39 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@421024f8 +2016-04-06 19:21:39 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@186b0f8b +2016-04-06 19:21:39 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@b1b0dfe +2016-04-06 19:21:39 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@1b12726b +2016-04-06 19:21:39 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 91 ms +2016-04-06 19:21:40 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-06 19:21:40 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-06 19:21:40 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-06 19:21:40 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-06 19:21:40 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-06 19:21:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 19:21:40 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:21:40 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-06 19:21:40 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 26 ms +2016-04-06 19:21:40 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-06 19:21:40 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:21:40 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-06 19:21:40 INFO StatWPSClientSession:84 - CONNECT +2016-04-06 19:21:40 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 19:21:40 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 19:21:40 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-06 19:21:40 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 19:21:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 19:21:40 INFO ASLSession:352 - Logging the entrance +2016-04-06 19:21:40 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 19:21:40 DEBUG TemplateModel:83 - 2016-04-06 19:21:40, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 19:21:40 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:21:40 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-06 19:21:40 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 19:21:40 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-06 19:21:40 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 19:21:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-06 19:21:40 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-06 19:21:40 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:21:40 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-06 19:21:40 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:21:40 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:21:40 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-06 19:21:40 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-06 19:21:40 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-06 19:21:40 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-06 19:21:40 DEBUG JCRRepository:271 - Initialize repository +2016-04-06 19:21:40 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-06 19:21:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-06 19:21:40 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:21:40 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-06 19:21:40 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-06 19:21:40 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-06 19:21:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-06 19:21:40 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:21:40 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:21:40 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-06 19:21:40 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:21:40 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-06 19:21:41 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:21:41 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-06 19:21:41 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 19:21:41 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 19:21:41 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-06 19:21:41 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 19:21:41 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 19:21:41 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-06 19:21:41 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-06 19:21:41 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:21:41 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:21:41 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:21:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:21:41 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-06 19:21:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-06 19:21:41 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:21:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:21:41 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:21:41 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:21:41 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:21:41 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:21:41 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:21:41 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:21:41 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:21:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-06 19:21:41 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:21:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:21:41 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:21:41 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:21:41 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:21:41 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:21:41 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:21:41 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:21:41 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:21:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-06 19:21:41 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:21:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:21:41 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:21:41 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:21:41 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:21:41 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:21:41 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:21:41 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:21:41 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:21:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-06 19:21:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:21:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:21:42 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 19:21:42 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 19:21:42 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 19:21:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 19:21:42 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-06 19:21:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 19:21:42 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:21:42 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:21:42 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:21:42 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:21:42 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:21:42 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:21:42 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-06 19:21:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:21:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:21:42 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-06 19:21:42 INFO WorkspaceExplorerServiceImpl:188 - end time - 115 msc 0 sec +2016-04-06 19:22:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:22:35 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 19:23:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-06 19:23:30 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-06 19:24:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 19:24:25 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-06 19:25:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-06 19:25:20 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-06 19:26:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-06 19:26:15 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-06 19:27:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE 
in thread 31 +2016-04-06 19:27:10 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-06 19:27:58 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-06 19:27:58 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-06 19:27:58 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-06 19:27:58 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 19:27:58 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 19:27:58 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-06 19:27:58 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 19:27:58 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@c719220 +2016-04-06 19:27:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-06 19:27:58 INFO ASLSession:352 - Logging the entrance +2016-04-06 19:27:58 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-06 19:27:58 DEBUG TemplateModel:83 - 2016-04-06 19:27:58, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 19:27:58 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:27:58 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 19:27:58 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 117 ms +2016-04-06 19:27:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-06 19:27:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-06 19:27:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-06 19:27:59 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-06 19:27:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-06 19:27:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-06 19:27:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-06 19:27:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-06 19:27:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-06 19:27:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-06 19:27:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-06 19:27:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-06 19:27:59 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-06 19:27:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-06 19:27:59 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-06 19:27:59 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:27:59 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 19:27:59 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@52efc766 +2016-04-06 19:27:59 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@655562fa +2016-04-06 19:27:59 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@4d19e805 +2016-04-06 19:27:59 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@185dc0df +2016-04-06 19:27:59 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 134 ms +2016-04-06 19:27:59 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-06 19:27:59 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-06 19:27:59 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-06 19:27:59 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-06 19:27:59 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-06 19:27:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-06 19:27:59 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:27:59 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-06 19:27:59 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-06 19:27:59 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-06 19:27:59 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:27:59 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-06 19:27:59 INFO StatWPSClientSession:84 - CONNECT +2016-04-06 19:27:59 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 19:27:59 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 19:27:59 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 19:27:59 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 19:27:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:27:59 INFO ASLSession:352 - Logging the entrance +2016-04-06 19:27:59 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 19:27:59 DEBUG TemplateModel:83 - 2016-04-06 19:27:59, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 19:27:59 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:27:59 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-06 19:28:00 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 19:28:00 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-06 19:28:00 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 19:28:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 19:28:00 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-06 19:28:00 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:28:00 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-06 19:28:00 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:28:00 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:28:00 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-06 19:28:00 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-06 19:28:00 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-06 19:28:00 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-06 19:28:00 DEBUG JCRRepository:271 - Initialize repository +2016-04-06 19:28:00 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-06 19:28:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-06 19:28:00 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:28:00 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-06 19:28:00 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 24 ms +2016-04-06 19:28:00 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-06 19:28:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 19:28:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:28:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:28:00 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-06 19:28:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:28:00 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-06 19:28:00 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:28:00 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-06 19:28:00 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 19:28:00 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 19:28:00 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-06 19:28:00 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 19:28:00 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 19:28:00 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-06 19:28:00 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-06 19:28:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:28:00 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:28:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:28:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:28:00 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-06 19:28:01 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-06 19:28:01 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:28:01 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:28:01 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:28:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:28:01 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:28:01 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:28:01 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:28:01 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:28:01 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:28:01 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-06 19:28:01 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:28:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:28:02 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:28:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:28:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:28:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:28:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:28:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:28:02 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:28:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-06 19:28:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:28:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:28:02 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:28:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:28:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:28:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:28:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:28:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:28:02 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:28:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-06 19:28:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:28:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:28:02 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 19:28:02 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 19:28:02 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 19:28:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-06 19:28:02 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-06 19:28:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-06 19:28:02 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:28:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:28:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:28:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:28:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:28:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:28:02 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-06 19:28:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:28:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:28:03 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-06 19:28:03 INFO WorkspaceExplorerServiceImpl:188 - end time - 108 msc 0 sec +2016-04-06 19:30:46 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-06 19:30:46 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-06 19:30:46 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-06 19:30:46 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 19:30:46 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 19:30:46 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-06 19:30:46 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 19:30:46 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@99f76b6 +2016-04-06 19:30:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 19:30:46 INFO ASLSession:352 - Logging the entrance +2016-04-06 19:30:46 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 19:30:46 DEBUG TemplateModel:83 - 2016-04-06 19:30:46, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 19:30:46 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:30:46 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 19:30:46 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 102 ms +2016-04-06 19:30:46 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-06 19:30:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-06 19:30:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-06 19:30:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-06 19:30:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-06 19:30:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-06 19:30:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-06 19:30:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-06 19:30:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-06 19:30:46 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-06 19:30:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-06 19:30:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-06 19:30:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-06 19:30:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-06 19:30:47 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-06 19:30:47 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:30:47 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 19:30:47 INFO 
HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@a1379cb +2016-04-06 19:30:47 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@4292d6d1 +2016-04-06 19:30:47 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@47399aee +2016-04-06 19:30:47 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@100af4a2 +2016-04-06 19:30:47 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 133 ms +2016-04-06 19:30:47 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-06 19:30:47 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-06 19:30:47 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-06 19:30:47 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-06 19:30:47 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-06 19:30:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 19:30:47 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:30:47 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-06 19:30:47 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 25 ms +2016-04-06 19:30:47 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-06 19:30:47 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:30:47 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-06 19:30:47 INFO StatWPSClientSession:84 - CONNECT +2016-04-06 19:30:47 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 19:30:47 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 19:30:47 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-06 19:30:47 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 19:30:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-06 19:30:47 INFO ASLSession:352 - Logging the entrance +2016-04-06 19:30:47 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 19:30:47 DEBUG TemplateModel:83 - 2016-04-06 19:30:47, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 19:30:47 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:30:47 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-06 19:30:48 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 19:30:48 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-06 19:30:48 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 19:30:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 19:30:48 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-06 19:30:48 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:30:48 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-06 19:30:48 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:30:48 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:30:48 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-06 19:30:48 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-06 19:30:48 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-06 19:30:48 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-06 19:30:48 DEBUG JCRRepository:271 - Initialize repository +2016-04-06 19:30:48 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-06 19:30:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-06 19:30:48 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:30:48 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-06 19:30:48 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 23 ms +2016-04-06 19:30:48 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:30:48 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). 
A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-06 19:30:48 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-06 19:30:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 19:30:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:30:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:30:48 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-06 19:30:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:30:48 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-06 19:30:48 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 19:30:48 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 19:30:48 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-06 19:30:48 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 19:30:48 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 19:30:48 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-06 19:30:48 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-06 19:30:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:30:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:30:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:30:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:30:48 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-06 19:30:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi 
+2016-04-06 19:30:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:30:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:30:49 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:30:49 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:30:49 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:30:49 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:30:49 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:30:49 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:30:49 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:30:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-06 19:30:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:30:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:30:49 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:30:49 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:30:49 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:30:49 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:30:49 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:30:49 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:30:49 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:30:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-06 19:30:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:30:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:30:49 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:30:49 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:30:49 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:30:49 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:30:49 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:30:49 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:30:49 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:30:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-06 19:30:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:30:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:30:49 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout 
property. Parsing from jar. +2016-04-06 19:30:49 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 19:30:49 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 19:30:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-06 19:30:49 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-06 19:30:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-06 19:30:49 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:30:49 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:30:49 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:30:49 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:30:49 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:30:49 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:30:50 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-06 19:30:50 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:30:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:30:50 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-06 19:30:50 INFO WorkspaceExplorerServiceImpl:188 - end time - 105 msc 0 sec +2016-04-06 19:31:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:31:42 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 19:32:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 19:32:37 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-06 19:33:32 DEBUG DefaultScopeProvider:38 - setting scope 
/gcube/devsec/devVRE in thread 36 +2016-04-06 19:33:32 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-06 19:34:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-06 19:34:27 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-06 19:35:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-06 19:35:22 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-06 19:36:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-06 19:36:17 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-06 19:37:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:37:12 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 19:38:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:38:07 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 19:39:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 19:39:02 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-06 19:39:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 19:39:57 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-06 19:41:20 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-06 19:41:20 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-06 19:41:20 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-06 19:41:20 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 19:41:20 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 19:41:20 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 19:41:20 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 19:41:20 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@221dd59d +2016-04-06 19:41:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:41:20 INFO ASLSession:352 - Logging the entrance +2016-04-06 19:41:20 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 19:41:20 DEBUG TemplateModel:83 - 2016-04-06 19:41:20, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 19:41:20 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:41:20 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 19:41:20 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 110 ms +2016-04-06 19:41:20 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-06 19:41:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-06 19:41:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-06 19:41:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-06 19:41:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-06 19:41:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-06 19:41:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-06 19:41:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-06 19:41:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-06 19:41:20 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-06 19:41:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-06 19:41:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-06 19:41:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-06 19:41:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-06 19:41:20 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-06 19:41:20 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:41:20 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 19:41:20 INFO 
HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@6f4578cf +2016-04-06 19:41:20 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@1365bd54 +2016-04-06 19:41:20 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@5480b349 +2016-04-06 19:41:20 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@7daa20bc +2016-04-06 19:41:20 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 93 ms +2016-04-06 19:41:20 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-06 19:41:20 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-06 19:41:20 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-06 19:41:20 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-06 19:41:20 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-06 19:41:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:41:20 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:41:20 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-06 19:41:20 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 31 ms +2016-04-06 19:41:20 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-06 19:41:20 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:41:20 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-06 19:41:20 INFO StatWPSClientSession:84 - CONNECT +2016-04-06 19:41:21 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 19:41:21 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 19:41:21 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-06 19:41:21 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 19:41:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-06 19:41:21 INFO ASLSession:352 - Logging the entrance +2016-04-06 19:41:21 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 19:41:21 DEBUG TemplateModel:83 - 2016-04-06 19:41:21, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 19:41:21 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:41:21 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-06 19:41:21 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 19:41:21 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-06 19:41:21 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 19:41:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-06 19:41:21 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-06 19:41:21 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:41:21 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-06 19:41:21 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:41:21 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:41:21 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-06 19:41:21 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-06 19:41:21 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-06 19:41:21 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-06 19:41:21 DEBUG JCRRepository:271 - Initialize repository +2016-04-06 19:41:21 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-06 19:41:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-06 19:41:21 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:41:21 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-06 19:41:21 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-06 19:41:21 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-06 19:41:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-06 19:41:21 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:41:21 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:41:21 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-06 19:41:21 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:41:21 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-06 19:41:21 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:41:21 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-06 19:41:22 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 19:41:22 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 19:41:22 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-06 19:41:22 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 19:41:22 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 19:41:22 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-06 19:41:22 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-06 19:41:22 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:41:22 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:41:22 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:41:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:41:22 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-06 19:41:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-06 19:41:22 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:41:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:41:22 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:41:22 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:41:22 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:41:22 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:41:22 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:41:22 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:41:22 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:41:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-06 19:41:22 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:41:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:41:22 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:41:22 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:41:22 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:41:22 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:41:22 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:41:22 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:41:22 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:41:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-06 19:41:22 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:41:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:41:22 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:41:22 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:41:22 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:41:22 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:41:22 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:41:22 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:41:22 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:41:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-06 19:41:22 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:41:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:41:23 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 19:41:23 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 19:41:23 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 19:41:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:41:23 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 19:41:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:41:23 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:41:23 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:41:23 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:41:23 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:41:23 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:41:23 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:41:23 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-06 19:41:23 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:41:23 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:41:23 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-06 19:41:23 INFO WorkspaceExplorerServiceImpl:188 - end time - 112 msc 0 sec +2016-04-06 19:42:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-06 19:42:16 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-06 19:43:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 19:43:11 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-06 19:44:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-06 19:44:06 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-06 19:48:12 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-06 19:48:12 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-06 19:48:12 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-06 19:48:12 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 19:48:12 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 19:48:12 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-06 19:48:12 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 19:48:12 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@783ca33c +2016-04-06 19:48:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-06 19:48:12 INFO ASLSession:352 - Logging the entrance +2016-04-06 19:48:12 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 19:48:12 DEBUG TemplateModel:83 - 2016-04-06 19:48:12, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 19:48:12 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:48:12 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 19:48:12 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 111 ms +2016-04-06 19:48:12 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-06 19:48:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-06 19:48:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-06 19:48:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-06 19:48:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-06 19:48:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-06 19:48:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-06 19:48:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-06 19:48:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-06 19:48:13 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-06 19:48:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-06 19:48:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-06 19:48:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-06 19:48:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-06 19:48:13 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-06 19:48:13 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:48:13 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 19:48:13 INFO 
HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@5f437586 +2016-04-06 19:48:13 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@22033a33 +2016-04-06 19:48:13 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@6557e2b9 +2016-04-06 19:48:13 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@668ffe8e +2016-04-06 19:48:13 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 127 ms +2016-04-06 19:48:13 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-06 19:48:13 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-06 19:48:13 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-06 19:48:13 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-06 19:48:13 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-06 19:48:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-06 19:48:13 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:48:13 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-06 19:48:13 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 32 ms +2016-04-06 19:48:13 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-06 19:48:13 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:48:13 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-06 19:48:13 INFO StatWPSClientSession:84 - CONNECT +2016-04-06 19:48:13 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 19:48:13 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 19:48:13 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-06 19:48:13 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 19:48:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 19:48:13 INFO ASLSession:352 - Logging the entrance +2016-04-06 19:48:13 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 19:48:13 DEBUG TemplateModel:83 - 2016-04-06 19:48:13, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 19:48:13 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:48:13 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-06 19:48:14 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 19:48:14 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-06 19:48:14 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 19:48:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-06 19:48:14 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-06 19:48:14 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:48:14 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-06 19:48:14 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:48:14 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:48:14 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-06 19:48:14 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-06 19:48:14 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-06 19:48:14 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-06 19:48:14 DEBUG JCRRepository:271 - Initialize repository +2016-04-06 19:48:14 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-06 19:48:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-06 19:48:14 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:48:14 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-06 19:48:14 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:48:14 DEBUG SClient4WPS:262 - OperatorClass: 
[OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. 
In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. 
A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator 
[id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator 
[id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. 
The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-06 19:48:14 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 95 ms +2016-04-06 19:48:14 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-06 19:48:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-06 19:48:14 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:48:14 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:48:14 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. 
+2016-04-06 19:48:14 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:48:14 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-06 19:48:14 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 19:48:14 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 19:48:14 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-06 19:48:14 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 19:48:14 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 19:48:14 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-06 19:48:14 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-06 19:48:14 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:48:14 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:48:14 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:48:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:48:15 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-06 19:48:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-06 19:48:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:48:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:48:15 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:48:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:48:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:48:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:48:15 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:48:15 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:48:15 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:48:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-06 19:48:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:48:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:48:15 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:48:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:48:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:48:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:48:15 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:48:15 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:48:15 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:48:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-06 19:48:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:48:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:48:15 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:48:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:48:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:48:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:48:15 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:48:15 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:48:15 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:48:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-06 19:48:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:48:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:48:16 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 19:48:16 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 19:48:16 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 19:48:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 19:48:16 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-06 19:48:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 19:48:16 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:48:16 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:48:16 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:48:16 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:48:16 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:48:16 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:48:16 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-06 19:48:16 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:48:16 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:48:16 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-06 19:48:16 INFO WorkspaceExplorerServiceImpl:188 - end time - 122 msc 0 sec +2016-04-06 19:49:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 19:49:08 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-06 19:50:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 19:50:03 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-06 19:50:41 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-06 19:50:41 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-06 19:50:41 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-06 19:50:41 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 19:50:41 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 19:50:41 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-06 19:50:41 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 19:50:41 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@1b609c59 +2016-04-06 19:50:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 19:50:41 INFO ASLSession:352 - Logging the entrance +2016-04-06 19:50:41 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 19:50:41 DEBUG TemplateModel:83 - 2016-04-06 19:50:41, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 19:50:41 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:50:41 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 19:50:41 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 127 ms +2016-04-06 19:50:41 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-06 19:50:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-06 19:50:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-06 19:50:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-06 19:50:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-06 19:50:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-06 19:50:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-06 19:50:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-06 19:50:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-06 19:50:41 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-06 19:50:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-06 19:50:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-06 19:50:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-06 19:50:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-06 19:50:41 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-06 19:50:41 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:50:41 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 19:50:41 INFO 
HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@5ba05ee0 +2016-04-06 19:50:41 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@66939fd7 +2016-04-06 19:50:41 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@5f831edc +2016-04-06 19:50:41 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@188a5cc8 +2016-04-06 19:50:41 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 125 ms +2016-04-06 19:50:42 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-06 19:50:42 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-06 19:50:42 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-06 19:50:42 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-06 19:50:42 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-06 19:50:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 19:50:42 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:50:42 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-06 19:50:42 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 20 ms +2016-04-06 19:50:42 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-06 19:50:42 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:50:42 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-06 19:50:42 INFO StatWPSClientSession:84 - CONNECT +2016-04-06 19:50:42 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 19:50:42 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 19:50:42 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-06 19:50:42 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 19:50:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 19:50:42 INFO ASLSession:352 - Logging the entrance +2016-04-06 19:50:42 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 19:50:42 DEBUG TemplateModel:83 - 2016-04-06 19:50:42, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 19:50:42 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:50:42 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-06 19:50:42 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 19:50:42 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-06 19:50:42 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 19:50:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-06 19:50:42 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-06 19:50:42 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:50:42 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-06 19:50:42 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:50:42 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:50:42 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-06 19:50:42 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-06 19:50:42 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-06 19:50:42 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-06 19:50:42 DEBUG JCRRepository:271 - Initialize repository +2016-04-06 19:50:42 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-06 19:50:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-06 19:50:42 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:50:42 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-06 19:50:42 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 27 ms +2016-04-06 19:50:43 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-06 19:50:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-06 19:50:43 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:50:43 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:50:43 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-06 19:50:43 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:50:43 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-06 19:50:43 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:50:43 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-06 19:50:43 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 19:50:43 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 19:50:43 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-06 19:50:43 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 19:50:43 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 19:50:43 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-06 19:50:43 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-06 19:50:43 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:50:43 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:50:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:50:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:50:43 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-06 19:50:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-06 19:50:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:50:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:50:43 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:50:43 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:50:43 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:50:43 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:50:43 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:50:43 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:50:43 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:50:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-06 19:50:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:50:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:50:43 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:50:43 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:50:43 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:50:43 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:50:43 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:50:43 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:50:43 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:50:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-06 19:50:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:50:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:50:44 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:50:44 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:50:44 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:50:44 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:50:44 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:50:44 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:50:44 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:50:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-06 19:50:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:50:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:50:44 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 19:50:44 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 19:50:44 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 19:50:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 19:50:44 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-06 19:50:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 19:50:44 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:50:44 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:50:44 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:50:44 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:50:44 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:50:44 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:50:44 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-06 19:50:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:50:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:50:44 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-06 19:50:44 INFO WorkspaceExplorerServiceImpl:188 - end time - 112 msc 0 sec +2016-04-06 19:51:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:51:37 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 19:52:39 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-06 19:52:39 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-06 19:52:39 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-06 19:52:39 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 19:52:39 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 19:52:39 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 19:52:39 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 19:52:39 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@3524d736 +2016-04-06 19:52:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:52:39 INFO ASLSession:352 - Logging the entrance +2016-04-06 19:52:39 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 19:52:39 DEBUG TemplateModel:83 - 2016-04-06 19:52:39, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 19:52:39 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:52:39 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 19:52:39 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 128 ms +2016-04-06 19:52:39 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-06 19:52:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-06 19:52:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-06 19:52:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-06 19:52:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-06 19:52:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-06 19:52:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-06 19:52:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-06 19:52:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-06 19:52:39 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-06 19:52:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-06 19:52:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-06 19:52:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-06 19:52:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-06 19:52:39 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-06 19:52:39 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:52:39 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 19:52:39 INFO 
HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@3feadf8b +2016-04-06 19:52:39 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@72edabb6 +2016-04-06 19:52:39 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@1ec949a8 +2016-04-06 19:52:39 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@3b7c3484 +2016-04-06 19:52:39 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 132 ms +2016-04-06 19:52:40 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-06 19:52:40 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-06 19:52:40 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-06 19:52:40 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-06 19:52:40 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-06 19:52:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:52:40 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:52:40 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-06 19:52:40 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 34 ms +2016-04-06 19:52:40 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-06 19:52:40 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:52:40 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-06 19:52:40 INFO StatWPSClientSession:84 - CONNECT +2016-04-06 19:52:40 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 19:52:40 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 19:52:40 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-06 19:52:40 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 19:52:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-06 19:52:40 INFO ASLSession:352 - Logging the entrance +2016-04-06 19:52:40 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 19:52:40 DEBUG TemplateModel:83 - 2016-04-06 19:52:40, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 19:52:40 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:52:40 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-06 19:52:40 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 19:52:40 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-06 19:52:40 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 19:52:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-06 19:52:40 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-06 19:52:40 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:52:40 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-06 19:52:40 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:52:40 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:52:40 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-06 19:52:40 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-06 19:52:40 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-06 19:52:40 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-06 19:52:40 DEBUG JCRRepository:271 - Initialize repository +2016-04-06 19:52:40 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-06 19:52:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-06 19:52:40 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:52:40 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-06 19:52:40 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 19 ms +2016-04-06 19:52:40 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:52:40 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). 
A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-06 19:52:40 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-06 19:52:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-06 19:52:40 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:52:40 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:52:40 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-06 19:52:40 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:52:40 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-06 19:52:41 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 19:52:41 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 19:52:41 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-06 19:52:41 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 19:52:41 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 19:52:41 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-06 19:52:41 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-06 19:52:41 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:52:41 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:52:41 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:52:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:52:41 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-06 19:52:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi 
+2016-04-06 19:52:41 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:52:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:52:41 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:52:41 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:52:41 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:52:41 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:52:41 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:52:41 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:52:41 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:52:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-06 19:52:41 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:52:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:52:41 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:52:41 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:52:41 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:52:41 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:52:41 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:52:41 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:52:41 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:52:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-06 19:52:41 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:52:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:52:41 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:52:41 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:52:41 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:52:41 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:52:41 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:52:41 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:52:41 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:52:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-06 19:52:41 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:52:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:52:42 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout 
property. Parsing from jar. +2016-04-06 19:52:42 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 19:52:42 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 19:52:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 19:52:42 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-06 19:52:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 19:52:42 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:52:42 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:52:42 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:52:42 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:52:42 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:52:42 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:52:42 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-06 19:52:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:52:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:52:42 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-06 19:52:42 INFO WorkspaceExplorerServiceImpl:188 - end time - 104 msc 0 sec +2016-04-06 19:53:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-06 19:53:35 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-06 19:54:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:54:30 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 19:55:16 DEBUG AccessLogger:124 - Creating a message handling object 
in order to handle the message queue +2016-04-06 19:55:16 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-06 19:55:16 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-06 19:55:16 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 19:55:16 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 19:55:16 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-06 19:55:16 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 19:55:16 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@311e2e6 +2016-04-06 19:55:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 19:55:16 INFO ASLSession:352 - Logging the entrance +2016-04-06 19:55:16 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 19:55:16 DEBUG TemplateModel:83 - 2016-04-06 19:55:16, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 19:55:16 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:55:16 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return 
$entry/text() +2016-04-06 19:55:16 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 117 ms +2016-04-06 19:55:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-06 19:55:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-06 19:55:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-06 19:55:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-06 19:55:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-06 19:55:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-06 19:55:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-06 19:55:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-06 19:55:16 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-06 19:55:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-06 19:55:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-06 19:55:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-06 19:55:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-06 19:55:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-06 19:55:17 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-06 19:55:17 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:55:17 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 
'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 19:55:17 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@5f6d412d +2016-04-06 19:55:17 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@7cf9abca +2016-04-06 19:55:17 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@6600158f +2016-04-06 19:55:17 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@669ef17f +2016-04-06 19:55:17 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 109 ms +2016-04-06 19:55:17 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-06 19:55:17 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-06 19:55:17 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-06 19:55:17 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-06 19:55:17 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-06 19:55:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 19:55:17 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 19:55:17 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 19:55:17 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-06 19:55:17 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 19:55:17 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:55:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-06 19:55:17 INFO ASLSession:352 - Logging the entrance +2016-04-06 19:55:17 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-06 19:55:17 DEBUG TemplateModel:83 - 2016-04-06 19:55:17, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 19:55:17 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:55:17 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-06 19:55:17 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-06 19:55:17 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 27 ms +2016-04-06 19:55:17 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-06 19:55:17 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:55:17 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-06 19:55:17 INFO StatWPSClientSession:84 - CONNECT +2016-04-06 19:55:17 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. 
+2016-04-06 19:55:17 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 19:55:17 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 19:55:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:55:17 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 19:55:17 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 19:55:17 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-06 19:55:17 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:55:17 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:55:17 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-06 19:55:17 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-06 19:55:17 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-06 19:55:17 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-06 19:55:17 DEBUG JCRRepository:271 - Initialize repository +2016-04-06 19:55:17 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-06 19:55:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-06 19:55:17 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 19:55:17 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-06 19:55:17 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 36 ms +2016-04-06 19:55:17 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-06 19:55:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:55:17 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:55:17 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:55:17 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-06 19:55:17 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:55:17 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-06 19:55:18 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 19:55:18 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 19:55:18 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-06 19:55:18 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 19:55:18 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 19:55:18 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-06 19:55:18 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-06 19:55:18 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:55:18 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:55:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:55:18 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 19:55:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath 
/Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:55:18 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. 
an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. 
v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. 
Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, 
description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. 
NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, 
briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. 
Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. 
A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. 
Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-06 19:55:18 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-06 19:55:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-06 19:55:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:55:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:55:18 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:55:18 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:55:18 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:55:18 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:55:18 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:55:18 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:55:18 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:55:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-06 19:55:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:55:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:55:18 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:55:18 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:55:18 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:55:18 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:55:18 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:55:18 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:55:18 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:55:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-06 19:55:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:55:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:55:18 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:55:18 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:55:18 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:55:18 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:55:18 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:55:18 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:55:18 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 19:55:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-06 19:55:19 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:55:19 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:55:19 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout 
property. Parsing from jar. +2016-04-06 19:55:19 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 19:55:19 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 19:55:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-06 19:55:19 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-06 19:55:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-06 19:55:19 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 19:55:19 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 19:55:19 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 19:55:19 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 19:55:19 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 19:55:19 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 19:55:19 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-06 19:55:19 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 19:55:19 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 19:55:19 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-06 19:55:19 INFO WorkspaceExplorerServiceImpl:188 - end time - 128 msc 0 sec +2016-04-06 19:56:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:56:12 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 19:57:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-06 19:57:07 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-06 19:58:02 DEBUG DefaultScopeProvider:38 - setting scope 
/gcube/devsec/devVRE in thread 32 +2016-04-06 19:58:02 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-06 19:58:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:58:57 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 19:59:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 19:59:52 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 20:00:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-06 20:00:47 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-06 20:01:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-06 20:01:42 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-06 20:02:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 20:02:37 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 20:03:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 20:03:32 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-06 20:05:02 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-06 20:05:02 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-06 20:05:02 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-06 20:05:02 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 20:05:02 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 20:05:02 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-06 20:05:02 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 20:05:02 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@7de34353 +2016-04-06 20:05:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 20:05:02 INFO ASLSession:352 - Logging the entrance +2016-04-06 20:05:02 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 20:05:02 DEBUG TemplateModel:83 - 2016-04-06 20:05:02, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 20:05:02 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 20:05:02 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 20:05:02 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 150 ms +2016-04-06 20:05:02 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-06 20:05:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-06 20:05:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-06 20:05:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-06 20:05:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-06 20:05:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-06 20:05:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-06 20:05:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-06 20:05:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-06 20:05:02 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-06 20:05:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-06 20:05:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-06 20:05:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-06 20:05:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-06 20:05:02 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-06 20:05:02 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 20:05:02 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 20:05:02 INFO 
HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@7c17896b +2016-04-06 20:05:02 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@6ac2e544 +2016-04-06 20:05:02 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@78e03e6 +2016-04-06 20:05:02 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@1dd5da39 +2016-04-06 20:05:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 145 ms +2016-04-06 20:05:03 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-06 20:05:03 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 20:05:03 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 20:05:03 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-06 20:05:03 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 20:05:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 20:05:03 INFO ASLSession:352 - Logging the entrance +2016-04-06 20:05:03 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 20:05:03 DEBUG TemplateModel:83 - 2016-04-06 20:05:03, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 20:05:03 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 20:05:03 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-06 20:05:03 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-06 20:05:03 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-06 20:05:03 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-06 20:05:03 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-06 20:05:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 20:05:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 20:05:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-06 20:05:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 30 ms +2016-04-06 20:05:03 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-06 20:05:03 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 20:05:03 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-06 20:05:03 INFO StatWPSClientSession:84 - CONNECT +2016-04-06 20:05:03 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 20:05:03 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 20:05:03 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 20:05:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-06 20:05:03 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-06 20:05:03 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 20:05:03 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-06 20:05:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:05:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:05:03 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-06 20:05:03 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-06 20:05:03 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-06 20:05:03 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-06 20:05:03 DEBUG JCRRepository:271 - Initialize repository +2016-04-06 20:05:03 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-06 20:05:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-06 20:05:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 20:05:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-06 20:05:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 27 ms +2016-04-06 20:05:04 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-06 20:05:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-06 20:05:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:05:04 INFO JCRHomeManager:48 - getUser 
portalLogin: giancarlo.panichi +2016-04-06 20:05:04 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-06 20:05:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:05:04 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-06 20:05:04 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 20:05:04 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. 
Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-06 20:05:04 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 20:05:04 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 20:05:04 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-06 20:05:04 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 20:05:04 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 20:05:04 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-06 20:05:04 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-06 20:05:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:05:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:05:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:05:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:05:04 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-06 20:05:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-06 20:05:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:05:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:05:04 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:05:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:05:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:05:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:05:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:05:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:05:04 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 20:05:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-06 20:05:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:05:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:05:04 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:05:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:05:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:05:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:05:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:05:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:05:04 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 20:05:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-06 20:05:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:05:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:05:05 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:05:05 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:05:05 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:05:05 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:05:05 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:05:05 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:05:05 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 20:05:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-06 20:05:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:05:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:05:05 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 20:05:05 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 20:05:05 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 20:05:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 20:05:05 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-06 20:05:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 20:05:05 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:05:05 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:05:05 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:05:05 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:05:05 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:05:05 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:05:05 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-06 20:05:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:05:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:05:05 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-06 20:05:05 INFO WorkspaceExplorerServiceImpl:188 - end time - 118 msc 0 sec +2016-04-06 20:07:41 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-06 20:07:41 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-06 20:07:41 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-06 20:07:41 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 20:07:41 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 20:07:41 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 20:07:41 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 20:07:41 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@50c26f6 +2016-04-06 20:07:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 20:07:41 INFO ASLSession:352 - Logging the entrance +2016-04-06 20:07:41 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 20:07:41 DEBUG TemplateModel:83 - 2016-04-06 20:07:41, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 20:07:41 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 20:07:41 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 20:07:41 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 126 ms +2016-04-06 20:07:41 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-06 20:07:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-06 20:07:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-06 20:07:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-06 20:07:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-06 20:07:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-06 20:07:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-06 20:07:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-06 20:07:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-06 20:07:41 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-06 20:07:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-06 20:07:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-06 20:07:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-06 20:07:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-06 20:07:42 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-06 20:07:42 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 20:07:42 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 20:07:42 INFO 
HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@79a07b9b +2016-04-06 20:07:42 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@452ea63e +2016-04-06 20:07:42 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@1c02fd4c +2016-04-06 20:07:42 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@57b8a4f0 +2016-04-06 20:07:42 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 118 ms +2016-04-06 20:07:42 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-06 20:07:42 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-06 20:07:42 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-06 20:07:42 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-06 20:07:42 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-06 20:07:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 20:07:42 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 20:07:42 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-06 20:07:42 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 32 ms +2016-04-06 20:07:42 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-06 20:07:42 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 20:07:42 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-06 20:07:42 INFO StatWPSClientSession:84 - CONNECT +2016-04-06 20:07:42 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 20:07:42 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 20:07:42 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-06 20:07:42 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 20:07:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-06 20:07:42 INFO ASLSession:352 - Logging the entrance +2016-04-06 20:07:42 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-06 20:07:42 DEBUG TemplateModel:83 - 2016-04-06 20:07:42, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 20:07:42 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 20:07:42 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-06 20:07:43 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 20:07:43 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. 
a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. 
If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-06 20:07:43 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 20:07:43 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 20:07:43 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 20:07:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 20:07:43 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-06 20:07:43 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 20:07:43 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-06 20:07:43 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:07:43 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:07:43 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-06 20:07:43 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-06 20:07:43 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-06 20:07:43 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-06 20:07:43 DEBUG JCRRepository:271 - Initialize repository +2016-04-06 20:07:43 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-06 20:07:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-06 20:07:43 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 20:07:43 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-06 20:07:43 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 16 ms +2016-04-06 20:07:43 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-06 20:07:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 20:07:43 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:07:43 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:07:43 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. 
+2016-04-06 20:07:43 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:07:43 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-06 20:07:43 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 20:07:43 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 20:07:43 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-06 20:07:43 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 20:07:43 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 20:07:43 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-06 20:07:43 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-06 20:07:43 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:07:43 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:07:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:07:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:07:44 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-06 20:07:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-06 20:07:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:07:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:07:44 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:07:44 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:07:44 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:07:44 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:07:44 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:07:44 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:07:44 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 20:07:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-06 20:07:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:07:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:07:44 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:07:44 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:07:44 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:07:44 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:07:44 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:07:44 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:07:44 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 20:07:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-06 20:07:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:07:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:07:44 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:07:44 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:07:44 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:07:44 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:07:44 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:07:44 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:07:44 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 20:07:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-06 20:07:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:07:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:07:45 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 20:07:45 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 20:07:45 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 20:07:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 20:07:45 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 20:07:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 20:07:45 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:07:45 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:07:45 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:07:45 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:07:45 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:07:45 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:07:45 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-06 20:07:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:07:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:07:45 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-06 20:07:45 INFO WorkspaceExplorerServiceImpl:188 - end time - 103 msc 0 sec +2016-04-06 20:08:01 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 20:08:01 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 20:08:01 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-06 20:08:01 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 20:08:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-06 20:08:01 INFO ASLSession:352 - Logging the entrance +2016-04-06 20:08:01 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 20:08:01 DEBUG TemplateModel:83 - 2016-04-06 20:08:01, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 20:08:01 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 20:08:01 INFO DiscoveryDelegate:77 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
(cached) +2016-04-06 20:08:01 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-06 20:08:01 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-06 20:08:01 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-06 20:08:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-06 20:08:01 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 20:08:01 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-06 20:08:01 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 21 ms +2016-04-06 20:08:01 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-06 20:08:01 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 20:08:01 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-06 20:08:01 INFO StatWPSClientSession:84 - CONNECT +2016-04-06 20:08:01 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 
20:08:01 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 20:08:01 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 20:08:01 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 20:08:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 20:08:01 INFO ASLSession:352 - Logging the entrance +2016-04-06 20:08:01 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 20:08:01 DEBUG TemplateModel:83 - 2016-04-06 20:08:01, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 20:08:01 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 20:08:01 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-06 20:08:01 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 20:08:01 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-06 20:08:02 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 20:08:02 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 20:08:02 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 20:08:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 20:08:02 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-06 20:08:02 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 20:08:02 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-06 20:08:02 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:08:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:08:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:08:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:08:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:08:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:08:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:08:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:08:02 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-06 20:08:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-06 20:08:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:08:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath 
/Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:08:02 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:08:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:08:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:08:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:08:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:08:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:08:02 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 20:08:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-06 20:08:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:08:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:08:02 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:08:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:08:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:08:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:08:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:08:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:08:02 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 20:08:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-06 20:08:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:08:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:08:02 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:08:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:08:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:08:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:08:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:08:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:08:02 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 20:08:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-06 20:08:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:08:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:08:03 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout 
property. Parsing from jar. +2016-04-06 20:08:03 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 20:08:03 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 20:08:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-06 20:08:03 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-06 20:08:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-06 20:08:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:08:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:08:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:08:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:08:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:08:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:08:03 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-06 20:08:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:08:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:08:03 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-06 20:08:03 INFO WorkspaceExplorerServiceImpl:188 - end time - 92 msc 0 sec +2016-04-06 20:08:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 20:08:56 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-06 20:09:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 20:09:51 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-06 20:10:58 DEBUG AccessLogger:124 - Creating a message handling object in 
order to handle the message queue +2016-04-06 20:10:58 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-06 20:10:58 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-06 20:10:58 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 20:10:58 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 20:10:58 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-06 20:10:58 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 20:10:58 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@38e037be +2016-04-06 20:10:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-06 20:10:58 INFO ASLSession:352 - Logging the entrance +2016-04-06 20:10:58 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 20:10:58 DEBUG TemplateModel:83 - 2016-04-06 20:10:58, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 20:10:58 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 20:10:58 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-06 20:10:58 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 119 ms +2016-04-06 20:10:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-06 20:10:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-06 20:10:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-06 20:10:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-06 20:10:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-06 20:10:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-06 20:10:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-06 20:10:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-06 20:10:58 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-06 20:10:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-06 20:10:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-06 20:10:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-06 20:10:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-06 20:10:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-06 20:10:58 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-06 20:10:58 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 20:10:58 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 
'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 20:10:58 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@4fe0f185 +2016-04-06 20:10:58 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@25f5b789 +2016-04-06 20:10:58 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@28056cf0 +2016-04-06 20:10:58 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@1b6b4115 +2016-04-06 20:10:58 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 112 ms +2016-04-06 20:10:59 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-06 20:10:59 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-06 20:10:59 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-06 20:10:59 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-06 20:10:59 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-06 20:10:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-06 20:10:59 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 20:10:59 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-06 20:10:59 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 20 ms +2016-04-06 20:10:59 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-06 20:10:59 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 20:10:59 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-06 20:10:59 INFO StatWPSClientSession:84 - CONNECT +2016-04-06 20:10:59 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 20:10:59 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 20:10:59 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 20:10:59 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 20:10:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 20:10:59 INFO ASLSession:352 - Logging the entrance +2016-04-06 20:10:59 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 20:10:59 DEBUG TemplateModel:83 - 2016-04-06 20:10:59, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 20:10:59 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 20:10:59 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-06 20:10:59 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 20:10:59 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-06 20:10:59 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 20:10:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 20:10:59 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-06 20:10:59 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 20:10:59 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-06 20:10:59 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:10:59 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:10:59 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-06 20:10:59 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-06 20:10:59 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-06 20:10:59 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-06 20:10:59 DEBUG JCRRepository:271 - Initialize repository +2016-04-06 20:10:59 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-06 20:10:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-06 20:11:00 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 20:11:00 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-06 20:11:00 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 20 ms +2016-04-06 20:11:00 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 20:11:00 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). 
A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-06 20:11:00 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-06 20:11:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 20:11:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:11:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:11:00 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-06 20:11:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:11:00 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-06 20:11:00 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 20:11:00 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 20:11:00 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-06 20:11:00 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 20:11:00 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 20:11:00 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-06 20:11:00 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-06 20:11:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:11:00 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:11:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:11:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:11:00 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-06 20:11:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi 
+2016-04-06 20:11:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:11:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:11:00 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:11:00 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:11:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:11:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:11:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:11:00 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:11:00 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 20:11:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-06 20:11:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:11:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:11:00 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:11:00 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:11:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:11:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:11:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:11:00 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:11:02 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 20:11:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-06 20:11:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:11:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:11:02 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:11:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:11:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:11:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:11:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:11:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:11:02 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 20:11:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-06 20:11:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:11:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:11:02 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout 
property. Parsing from jar. +2016-04-06 20:11:02 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 20:11:02 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 20:11:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-06 20:11:02 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-06 20:11:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-06 20:11:02 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:11:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:11:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:11:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:11:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:11:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:11:02 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-06 20:11:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:11:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:11:02 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-06 20:11:02 INFO WorkspaceExplorerServiceImpl:188 - end time - 104 msc 0 sec +2016-04-06 20:11:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-06 20:11:54 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-06 20:12:44 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-06 20:12:44 DEBUG AccessLogger:44 - Constructing a new access logger. 
Create a new file if it does not exist for the current date +2016-04-06 20:12:44 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-06 20:12:44 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 20:12:44 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 20:12:44 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-06 20:12:44 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 20:12:44 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@56ad5422 +2016-04-06 20:12:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 20:12:44 INFO ASLSession:352 - Logging the entrance +2016-04-06 20:12:44 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 20:12:44 DEBUG TemplateModel:83 - 2016-04-06 20:12:44, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 20:12:44 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 20:12:44 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 20:12:44 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 109 ms +2016-04-06 20:12:44 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-06 20:12:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-06 20:12:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-06 20:12:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-06 20:12:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-06 20:12:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-06 20:12:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-06 20:12:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-06 20:12:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-06 20:12:44 INFO 
ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-06 20:12:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-06 20:12:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-06 20:12:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-06 20:12:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-06 20:12:44 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-06 20:12:44 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 20:12:45 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-06 20:12:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@6bf0773e +2016-04-06 20:12:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@763afc1a +2016-04-06 20:12:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@63f7eeb2 +2016-04-06 20:12:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@7c7a8c5a +2016-04-06 20:12:45 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 109 ms +2016-04-06 20:12:45 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-06 20:12:45 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-06 20:12:45 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-06 20:12:45 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-06 20:12:45 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-06 20:12:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 20:12:45 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 20:12:45 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-06 20:12:45 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 96 ms +2016-04-06 20:12:45 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-06 20:12:45 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 20:12:45 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-06 20:12:45 INFO StatWPSClientSession:84 - CONNECT +2016-04-06 20:12:45 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 20:12:45 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 20:12:45 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 20:12:45 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 20:12:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 20:12:45 INFO ASLSession:352 - Logging the entrance +2016-04-06 20:12:45 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 20:12:45 DEBUG TemplateModel:83 - 2016-04-06 20:12:45, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 20:12:45 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 20:12:45 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-06 20:12:45 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 20:12:45 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-06 20:12:45 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 20:12:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 20:12:45 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-06 20:12:45 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 20:12:45 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-06 20:12:45 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:12:45 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:12:46 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-06 20:12:46 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-06 20:12:46 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-06 20:12:46 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-06 20:12:46 DEBUG JCRRepository:271 - Initialize repository +2016-04-06 20:12:46 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-06 20:12:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-06 20:12:46 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 20:12:46 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-06 20:12:46 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-06 20:12:46 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-06 20:12:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 20:12:46 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:12:46 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:12:46 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-06 20:12:46 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:12:46 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-06 20:12:46 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 20:12:46 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-06 20:12:46 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 20:12:46 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 20:12:46 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-06 20:12:46 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 20:12:46 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 20:12:46 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-06 20:12:46 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-06 20:12:46 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:12:46 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:12:46 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:12:46 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:12:46 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-06 20:12:46 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-06 20:12:46 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:12:46 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:12:46 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:12:46 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:12:46 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:12:46 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:12:46 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:12:46 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:12:46 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 20:12:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-06 20:12:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:12:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:12:47 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:12:47 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:12:47 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:12:47 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:12:47 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:12:47 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:12:47 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 20:12:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-06 20:12:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:12:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:12:47 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:12:47 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:12:47 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:12:47 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:12:47 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:12:47 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:12:47 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 20:12:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-06 20:12:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:12:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:12:47 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 20:12:47 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 20:12:47 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 20:12:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-06 20:12:47 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-06 20:12:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-06 20:12:47 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:12:47 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:12:47 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:12:47 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:12:47 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:12:47 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:12:47 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-06 20:12:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:12:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:12:47 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-06 20:12:48 INFO WorkspaceExplorerServiceImpl:188 - end time - 103 msc 0 sec +2016-04-06 20:13:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 20:13:40 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 20:14:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 20:14:35 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-06 20:15:53 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-06 20:15:53 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-06 20:15:53 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-06 20:15:53 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 20:15:53 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 20:15:53 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 20:15:53 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 20:15:53 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@1abffd41 +2016-04-06 20:15:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 20:15:53 INFO ASLSession:352 - Logging the entrance +2016-04-06 20:15:53 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 20:15:53 DEBUG TemplateModel:83 - 2016-04-06 20:15:53, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 20:15:53 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 20:15:53 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 20:15:53 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 112 ms +2016-04-06 20:15:53 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-06 20:15:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-06 20:15:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-06 20:15:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-06 20:15:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-06 20:15:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-06 20:15:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-06 20:15:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-06 20:15:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-06 20:15:53 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-06 20:15:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-06 20:15:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-06 20:15:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-06 20:15:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-06 20:15:53 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-06 20:15:53 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 20:15:53 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 20:15:53 INFO 
HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@50f7c2e +2016-04-06 20:15:53 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@81da5fa +2016-04-06 20:15:53 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@7940cac2 +2016-04-06 20:15:53 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@6afbe2e1 +2016-04-06 20:15:53 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 119 ms +2016-04-06 20:15:53 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-06 20:15:53 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 20:15:53 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 20:15:53 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-06 20:15:53 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 20:15:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 20:15:53 INFO ASLSession:352 - Logging the entrance +2016-04-06 20:15:53 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 20:15:53 DEBUG TemplateModel:83 - 2016-04-06 20:15:53, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 20:15:53 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 20:15:53 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-06 20:15:54 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-06 20:15:54 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-06 20:15:54 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-06 20:15:54 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-06 20:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 20:15:54 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 20:15:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-06 20:15:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 34 ms +2016-04-06 20:15:54 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-06 20:15:54 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 20:15:54 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-06 20:15:54 INFO StatWPSClientSession:84 - CONNECT +2016-04-06 20:15:54 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 20:15:54 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 20:15:54 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 20:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-06 20:15:54 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-06 20:15:54 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 20:15:54 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-06 20:15:54 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:15:54 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:15:54 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-06 20:15:54 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-06 20:15:54 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-06 20:15:54 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-06 20:15:54 DEBUG JCRRepository:271 - Initialize repository +2016-04-06 20:15:54 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-06 20:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-06 20:15:54 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 20:15:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-06 20:15:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 17 ms +2016-04-06 20:15:54 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-06 20:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-06 20:15:54 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:15:54 INFO JCRHomeManager:48 - getUser 
portalLogin: giancarlo.panichi +2016-04-06 20:15:54 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-06 20:15:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:15:54 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-06 20:15:54 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 20:15:54 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 20:15:54 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-06 20:15:54 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 20:15:54 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 20:15:54 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-06 20:15:54 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-06 20:15:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:15:54 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:15:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:15:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:15:55 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-06 20:15:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-06 20:15:55 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 20:15:55 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator 
[id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. 
In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. 
A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator 
[id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator 
[id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. 
The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-06 20:15:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:15:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:15:55 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:15:55 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:15:55 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:15:55 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:15:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:15:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:15:55 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 20:15:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-06 20:15:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:15:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:15:55 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:15:55 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:15:55 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:15:55 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:15:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:15:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:15:55 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 20:15:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-06 20:15:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:15:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:15:55 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:15:55 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:15:55 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:15:55 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:15:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:15:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:15:55 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 20:15:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-06 20:15:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:15:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:15:56 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout 
property. Parsing from jar. +2016-04-06 20:15:56 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 20:15:56 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 20:15:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 20:15:56 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-06 20:15:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 20:15:56 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:15:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:15:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:15:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:15:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:15:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:15:56 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-06 20:15:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:15:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:15:56 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-06 20:15:56 INFO WorkspaceExplorerServiceImpl:188 - end time - 105 msc 0 sec +2016-04-06 20:16:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-06 20:16:49 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-06 20:18:07 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-06 20:18:07 DEBUG AccessLogger:44 - Constructing a new access logger. 
Create a new file if it does not exist for the current date +2016-04-06 20:18:07 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-06 20:18:07 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 20:18:07 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 20:18:07 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-06 20:18:07 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-06 20:18:07 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@a5c3327 +2016-04-06 20:18:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 20:18:07 INFO ASLSession:352 - Logging the entrance +2016-04-06 20:18:07 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 20:18:07 DEBUG TemplateModel:83 - 2016-04-06 20:18:07, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 20:18:07 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 20:18:07 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-06 20:18:08 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 130 ms +2016-04-06 20:18:08 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-06 20:18:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-06 20:18:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-06 20:18:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-06 20:18:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-06 20:18:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-06 20:18:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-06 20:18:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-06 20:18:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-06 20:18:08 INFO 
ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-06 20:18:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-06 20:18:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-06 20:18:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-06 20:18:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-06 20:18:08 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-06 20:18:08 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 20:18:08 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-06 20:18:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@7d4f973c +2016-04-06 20:18:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@a746414 +2016-04-06 20:18:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@379d9f8c +2016-04-06 20:18:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@33ae0398 +2016-04-06 20:18:08 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 139 ms +2016-04-06 20:18:08 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-06 20:18:08 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-06 20:18:08 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-06 20:18:08 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-06 20:18:08 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-06 20:18:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-06 20:18:08 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 20:18:08 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-06 20:18:08 INFO SessionUtil:49 - no user found in session, use test user +2016-04-06 20:18:08 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-06 20:18:08 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-06 20:18:08 INFO ASLSession:319 - The scope about to set 
is: /gcube/devsec/devVRE +2016-04-06 20:18:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 20:18:08 INFO ASLSession:352 - Logging the entrance +2016-04-06 20:18:08 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-06 20:18:08 DEBUG TemplateModel:83 - 2016-04-06 20:18:08, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-06 20:18:08 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 20:18:08 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-06 20:18:08 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 27 ms +2016-04-06 20:18:08 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-06 20:18:08 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 20:18:08 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-06 20:18:08 INFO StatWPSClientSession:84 - CONNECT +2016-04-06 20:18:08 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 20:18:08 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-06 20:18:08 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 20:18:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 20:18:08 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-06 20:18:08 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-06 20:18:08 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-06 20:18:08 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:18:08 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:18:09 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-06 20:18:09 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-06 20:18:09 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-06 20:18:09 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-06 20:18:09 DEBUG JCRRepository:271 - Initialize repository +2016-04-06 20:18:09 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-06 20:18:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-06 20:18:09 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-06 20:18:09 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-06 20:18:09 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-06 20:18:09 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-06 20:18:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-06 20:18:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:18:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:18:09 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-06 20:18:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:18:09 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-06 20:18:09 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 20:18:09 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 20:18:09 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-06 20:18:09 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-06 20:18:09 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-06 20:18:09 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-06 20:18:09 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-06 20:18:09 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-06 20:18:09 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-06 20:18:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:18:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:18:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:18:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:18:09 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-06 20:18:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-06 20:18:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:18:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:18:09 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:18:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:18:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:18:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:18:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:18:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:18:10 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 20:18:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-06 20:18:10 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:18:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:18:10 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:18:10 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:18:10 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:18:10 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:18:10 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:18:10 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:18:10 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 20:18:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-06 20:18:10 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:18:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:18:10 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:18:10 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:18:10 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:18:10 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:18:10 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:18:10 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:18:10 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-06 20:18:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-06 20:18:10 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:18:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:18:10 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-06 20:18:10 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-06 20:18:10 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-06 20:18:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 20:18:10 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-06 20:18:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-06 20:18:10 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-06 20:18:10 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-06 20:18:10 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-06 20:18:10 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-06 20:18:10 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-06 20:18:10 DEBUG JCRHomeManager:97 - User is already logged +2016-04-06 20:18:10 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-06 20:18:10 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-06 20:18:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-06 20:18:10 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-06 20:18:10 INFO WorkspaceExplorerServiceImpl:188 - end time - 106 msc 0 sec +2016-04-06 20:19:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-06 20:19:03 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 09:03:26 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 09:03:26 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 09:03:27 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 09:03:27 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 09:03:27 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:03:27 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 09:03:27 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:03:27 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@77084cd1 +2016-04-07 09:03:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 09:03:27 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:03:27 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 09:03:27 DEBUG TemplateModel:83 - 2016-04-07 09:03:27, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:03:27 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:03:27 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 09:03:27 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:03:27 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 09:03:27 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:03:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 09:03:27 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:03:27 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 09:03:27 DEBUG TemplateModel:83 - 2016-04-07 09:03:27, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:03:27 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:03:27 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 09:03:27 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 09:03:27 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 119 ms +2016-04-07 09:03:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 09:03:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 09:03:27 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 09:03:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 09:03:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 09:03:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 09:03:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 09:03:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 09:03:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 09:03:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 09:03:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 09:03:27 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 09:03:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 09:03:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 09:03:27 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 09:03:29 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 09:03:29 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:03:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 09:03:29 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 09:03:29 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:03:29 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 09:03:29 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:03:29 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:03:29 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 09:03:29 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 09:03:29 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 09:03:29 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 09:03:29 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 09:03:30 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 09:03:30 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 09:03:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-07 09:03:30 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:03:30 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:03:30 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 09:03:30 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 09:03:30 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@793e5984 +2016-04-07 09:03:30 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@1987c89 +2016-04-07 09:03:30 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@55116869 +2016-04-07 09:03:30 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@72a7d919 +2016-04-07 09:03:30 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 193 ms +2016-04-07 09:03:31 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 260 ms +2016-04-07 09:03:31 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 09:03:31 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 09:03:31 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 09:03:31 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 09:03:31 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 09:03:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 09:03:31 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 09:03:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 09:03:31 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:03:31 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:03:31 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. 
+2016-04-07 09:03:31 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:03:31 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 09:03:31 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:03:31 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 09:03:31 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-07 09:03:31 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 09:03:31 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:03:31 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 09:03:31 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 09:03:31 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 09:03:32 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 09:03:32 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 09:03:32 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 09:03:32 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 
3.1.1 +2016-04-07 09:03:32 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 09:03:32 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 09:03:32 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:03:32 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:03:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:03:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:03:32 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 09:03:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 09:03:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:03:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:03:32 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:03:32 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:03:32 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:03:32 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:03:32 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:03:32 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:03:32 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:03:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 09:03:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:03:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:03:32 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:03:32 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:03:32 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:03:32 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:03:32 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:03:32 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:03:32 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:03:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 09:03:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:03:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:03:32 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:03:32 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:03:32 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:03:32 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:03:32 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:03:32 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:03:32 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:03:32 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:03:32 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. 
a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. 
If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 09:03:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 09:03:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:03:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:03:33 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 09:03:33 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 09:03:33 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:03:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 09:03:33 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 09:03:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 09:03:33 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:03:33 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:03:33 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:03:33 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:03:33 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:03:33 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:03:33 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 09:03:33 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:03:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:03:33 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 09:03:33 INFO WorkspaceExplorerServiceImpl:188 - end time - 187 msc 0 sec +2016-04-07 09:04:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 09:04:22 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 09:05:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 09:05:17 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 09:06:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 09:06:12 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 09:07:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 09:07:07 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 09:08:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 09:08:02 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 09:08:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE 
in thread 33 +2016-04-07 09:08:57 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 09:09:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 09:09:52 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 09:10:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 09:10:47 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 09:11:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 09:11:42 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 09:12:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 09:12:37 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 09:16:44 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 09:16:44 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 09:16:44 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 09:16:44 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 09:16:44 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:16:44 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 09:16:44 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:16:44 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@23aa2b98 +2016-04-07 09:16:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 09:16:44 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:16:44 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 09:16:44 DEBUG TemplateModel:83 - 2016-04-07 09:16:44, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:16:44 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:16:44 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 09:16:44 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 116 ms +2016-04-07 09:16:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 09:16:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 09:16:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 09:16:44 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 09:16:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 09:16:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 09:16:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 09:16:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 09:16:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 09:16:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 09:16:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 09:16:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 09:16:44 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 09:16:44 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 09:16:45 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 09:16:45 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:16:45 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 09:16:45 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 09:16:45 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:16:45 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 09:16:45 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:16:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 09:16:45 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:16:45 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 09:16:45 DEBUG TemplateModel:83 - 2016-04-07 09:16:45, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:16:45 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:16:45 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 09:16:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@64ac43c0 +2016-04-07 09:16:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@1e6aa612 +2016-04-07 09:16:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@25237cda +2016-04-07 09:16:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@13338a9f +2016-04-07 09:16:45 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 470 ms +2016-04-07 09:16:46 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 09:16:46 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 09:16:46 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 09:16:46 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 09:16:46 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 09:16:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 09:16:46 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 09:16:46 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 09:16:46 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:16:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 09:16:46 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 09:16:46 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:16:46 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 09:16:46 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:16:46 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:16:46 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:16:46 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 09:16:46 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 22 ms +2016-04-07 09:16:46 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 09:16:46 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:16:46 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 09:16:46 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 09:16:46 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 09:16:46 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 09:16:46 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 09:16:46 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 09:16:46 DEBUG JCRRepository:271 - Initialize repository 
+2016-04-07 09:16:46 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 09:16:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 09:16:46 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:16:46 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 09:16:46 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 92 ms +2016-04-07 09:16:46 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 09:16:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 09:16:46 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:16:46 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:16:46 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. 
+2016-04-07 09:16:46 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:16:46 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 09:16:50 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:16:50 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). 
A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 09:16:50 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 09:16:50 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 09:16:50 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 09:16:50 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 09:16:50 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 09:16:50 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 09:16:50 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 09:16:50 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:16:50 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:16:50 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:16:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:16:50 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 09:16:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 09:16:50 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:16:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:16:51 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:16:51 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:16:51 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:16:51 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:16:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:16:51 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:16:51 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:16:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 09:16:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:16:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:16:51 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:16:51 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:16:51 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:16:51 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:16:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:16:51 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:16:51 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:16:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 09:16:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:16:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:16:51 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:16:51 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:16:51 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:16:51 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:16:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:16:51 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:16:51 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:16:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 09:16:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:16:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:16:51 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 09:16:52 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 09:16:52 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:16:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 09:16:52 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 09:16:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 09:16:52 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:16:52 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:16:52 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:16:52 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:16:52 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:16:52 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:16:52 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 09:16:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:16:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:16:52 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 09:16:52 INFO WorkspaceExplorerServiceImpl:188 - end time - 143 msc 0 sec +2016-04-07 09:17:14 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 09:17:14 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:17:14 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 09:17:14 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:17:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 09:17:14 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:17:14 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 09:17:14 DEBUG TemplateModel:83 - 2016-04-07 09:17:14, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:17:14 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:17:14 INFO DiscoveryDelegate:77 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
(cached) +2016-04-07 09:17:14 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 09:17:14 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 09:17:14 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 09:17:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 09:17:14 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:17:14 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 09:17:14 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-07 09:17:14 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 09:17:14 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:17:14 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 09:17:14 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 09:17:14 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 
09:17:14 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:17:14 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 09:17:14 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:17:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 09:17:14 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:17:14 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 09:17:14 DEBUG TemplateModel:83 - 2016-04-07 09:17:14, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:17:14 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:17:14 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 09:17:14 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:17:14 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 09:17:15 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 09:17:15 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 09:17:15 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:17:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 09:17:15 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 09:17:15 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:17:15 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 09:17:15 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:17:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:17:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:17:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:17:15 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:17:15 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:17:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:17:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:17:15 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 09:17:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 09:17:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:17:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath 
/Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:17:15 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:17:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:17:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:17:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:17:15 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:17:15 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:17:15 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:17:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 09:17:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:17:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:17:15 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:17:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:17:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:17:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:17:15 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:17:15 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:17:15 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:17:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 09:17:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:17:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:17:15 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:17:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:17:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:17:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:17:15 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:17:15 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:17:15 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:17:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 09:17:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:17:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:17:15 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout 
property. Parsing from jar. +2016-04-07 09:17:15 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 09:17:15 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:17:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 09:17:15 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 09:17:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 09:17:15 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:17:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:17:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:17:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:17:15 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:17:15 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:17:15 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 09:17:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:17:16 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:17:16 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 09:17:16 INFO WorkspaceExplorerServiceImpl:188 - end time - 71 msc 0 sec +2016-04-07 09:18:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 09:18:09 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 09:19:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 09:19:04 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 09:19:59 DEBUG DefaultScopeProvider:38 - setting scope 
/gcube/devsec/devVRE in thread 30 +2016-04-07 09:19:59 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 09:20:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 09:20:54 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 09:21:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 09:21:49 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 09:22:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 09:22:44 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 09:23:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 09:23:39 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 09:24:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 09:24:34 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 09:25:18 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 09:25:18 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 09:25:18 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 09:25:18 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 09:25:18 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:25:18 DEBUG ASLSession:458 - Getting security token: null in thread 28 +2016-04-07 09:25:18 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:25:18 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@6426fb68 +2016-04-07 09:25:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 28 +2016-04-07 09:25:18 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:25:18 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 09:25:18 DEBUG TemplateModel:83 - 2016-04-07 09:25:18, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:25:18 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:25:18 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 09:25:18 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 121 ms +2016-04-07 09:25:18 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 09:25:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 09:25:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 09:25:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 09:25:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 09:25:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 09:25:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 09:25:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 09:25:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 09:25:18 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 09:25:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 09:25:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 09:25:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 09:25:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 09:25:18 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 09:25:18 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:25:18 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 09:25:18 INFO 
HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@dfba993 +2016-04-07 09:25:18 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@3d59c29f +2016-04-07 09:25:18 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@19838e3b +2016-04-07 09:25:18 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@2a3f020f +2016-04-07 09:25:18 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 94 ms +2016-04-07 09:25:19 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 09:25:19 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 09:25:19 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 09:25:19 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 09:25:19 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 09:25:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 28 +2016-04-07 09:25:19 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:25:19 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 09:25:19 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 30 ms +2016-04-07 09:25:19 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 09:25:19 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:25:19 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 09:25:19 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 09:25:19 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 09:25:19 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:25:19 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 09:25:19 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:25:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 09:25:19 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:25:19 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 09:25:19 DEBUG TemplateModel:83 - 2016-04-07 09:25:19, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:25:19 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:25:19 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 09:25:19 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 09:25:19 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 09:25:19 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:25:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 09:25:19 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 09:25:19 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:25:19 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 09:25:19 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:25:19 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:25:19 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 09:25:19 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 09:25:19 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 09:25:19 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 09:25:19 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 09:25:19 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 09:25:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 09:25:19 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:25:19 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 09:25:19 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 68 ms +2016-04-07 09:25:19 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 09:25:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 09:25:19 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:25:19 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:25:19 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 09:25:19 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:25:19 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 09:25:20 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:25:20 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 09:25:20 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 09:25:20 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 09:25:20 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 09:25:20 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 09:25:20 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 09:25:20 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 09:25:20 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 09:25:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:25:20 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:25:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:25:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:25:20 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 09:25:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 09:25:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:25:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:25:20 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:25:20 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:25:20 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:25:20 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:25:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:25:20 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:25:20 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:25:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 09:25:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:25:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:25:20 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:25:20 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:25:20 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:25:20 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:25:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:25:20 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:25:20 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:25:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 09:25:21 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:25:21 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:25:21 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:25:21 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:25:21 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:25:21 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:25:21 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:25:21 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:25:21 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:25:21 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 09:25:21 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:25:21 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:25:21 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 09:25:21 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 09:25:21 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:25:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 09:25:21 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 09:25:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 09:25:21 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:25:21 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:25:21 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:25:21 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:25:21 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:25:21 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:25:21 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 09:25:21 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:25:21 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:25:21 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 09:25:21 INFO WorkspaceExplorerServiceImpl:188 - end time - 127 msc 0 sec +2016-04-07 09:26:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 09:26:14 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 09:27:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 09:27:09 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 09:28:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 09:28:04 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 09:29:48 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 09:29:48 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 09:29:48 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 09:29:48 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 09:29:48 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:29:48 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 09:29:48 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:29:48 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@57c2d367 +2016-04-07 09:29:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 09:29:48 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:29:48 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 09:29:48 DEBUG TemplateModel:83 - 2016-04-07 09:29:48, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:29:48 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:29:48 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 09:29:48 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 115 ms +2016-04-07 09:29:48 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 09:29:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 09:29:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 09:29:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 09:29:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 09:29:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 09:29:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 09:29:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 09:29:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 09:29:48 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 09:29:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 09:29:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 09:29:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 09:29:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 09:29:48 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 09:29:48 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:29:48 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 09:29:48 INFO 
HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@55b76347 +2016-04-07 09:29:48 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@2125bd3 +2016-04-07 09:29:48 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@5310a36c +2016-04-07 09:29:48 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@2648c665 +2016-04-07 09:29:48 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 110 ms +2016-04-07 09:29:48 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 09:29:48 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 09:29:48 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 09:29:48 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 09:29:48 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 09:29:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 09:29:48 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:29:48 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 09:29:49 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 23 ms +2016-04-07 09:29:49 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 09:29:49 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:29:49 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 09:29:49 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 09:29:49 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 09:29:49 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:29:49 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 09:29:49 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:29:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 09:29:49 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:29:49 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 09:29:49 DEBUG TemplateModel:83 - 2016-04-07 09:29:49, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:29:49 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:29:49 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 09:29:49 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 09:29:49 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 09:29:49 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:29:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 09:29:49 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 09:29:49 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:29:49 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 09:29:49 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:29:49 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:29:49 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 09:29:49 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 09:29:49 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 09:29:49 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 09:29:49 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 09:29:49 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 09:29:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-07 09:29:49 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:29:49 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 09:29:49 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 77 ms +2016-04-07 09:29:49 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 09:29:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 09:29:49 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:29:49 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:29:49 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 09:29:49 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:29:49 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 09:29:49 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:29:49 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 09:29:50 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 09:29:50 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 09:29:50 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 09:29:50 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 09:29:50 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 09:29:50 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 09:29:50 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 09:29:50 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:29:50 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:29:50 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:29:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:29:50 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 09:29:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 09:29:50 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:29:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:29:50 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:29:50 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:29:50 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:29:50 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:29:50 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:29:50 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:29:50 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:29:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 09:29:50 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:29:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:29:50 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:29:50 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:29:50 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:29:50 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:29:50 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:29:50 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:29:50 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:29:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 09:29:50 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:29:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:29:50 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:29:50 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:29:50 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:29:50 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:29:50 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:29:50 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:29:50 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:29:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 09:29:50 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:29:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:29:51 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 09:29:51 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 09:29:51 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:29:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 09:29:51 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 09:29:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 09:29:51 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:29:51 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:29:51 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:29:51 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:29:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:29:51 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:29:51 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 09:29:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:29:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:29:51 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 09:29:51 INFO WorkspaceExplorerServiceImpl:188 - end time - 110 msc 0 sec +2016-04-07 09:30:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 09:30:44 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 09:31:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 09:31:39 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 09:32:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 09:32:34 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 09:33:22 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 09:33:22 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 09:33:22 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 09:33:22 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 09:33:22 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:33:22 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 09:33:22 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:33:22 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@2c415ffb +2016-04-07 09:33:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 09:33:22 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:33:22 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 09:33:22 DEBUG TemplateModel:83 - 2016-04-07 09:33:22, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:33:22 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:33:22 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 09:33:22 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 98 ms +2016-04-07 09:33:22 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 09:33:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 09:33:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 09:33:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 09:33:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 09:33:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 09:33:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 09:33:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 09:33:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 09:33:22 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 09:33:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 09:33:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 09:33:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 09:33:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 09:33:22 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 09:33:23 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:33:23 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 09:33:23 INFO 
HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@2cb9f563 +2016-04-07 09:33:23 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@3dfe8cac +2016-04-07 09:33:23 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@6be516bd +2016-04-07 09:33:23 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@ac71e4 +2016-04-07 09:33:23 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 110 ms +2016-04-07 09:33:23 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 09:33:23 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 09:33:23 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 09:33:23 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 09:33:23 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 09:33:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 09:33:23 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:33:23 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 09:33:23 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 31 ms +2016-04-07 09:33:23 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 09:33:23 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:33:23 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 09:33:23 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 09:33:23 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 09:33:23 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:33:23 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 09:33:23 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:33:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 09:33:23 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:33:23 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 09:33:23 DEBUG TemplateModel:83 - 2016-04-07 09:33:23, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:33:23 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:33:23 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 09:33:23 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 09:33:23 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 09:33:23 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:33:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 09:33:23 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 09:33:23 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:33:23 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 09:33:23 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:33:23 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:33:23 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 09:33:23 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 09:33:23 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 09:33:23 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 09:33:23 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 09:33:23 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 09:33:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-07 09:33:24 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:33:24 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 09:33:24 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:33:24 DEBUG SClient4WPS:262 - OperatorClass: 
[OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. 
In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. 
A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator 
[id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator 
[id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. 
The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 09:33:24 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 933 ms +2016-04-07 09:33:25 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 09:33:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 09:33:25 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:33:25 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:33:25 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. 
+2016-04-07 09:33:25 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:33:25 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 09:33:25 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 09:33:25 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 09:33:25 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 09:33:25 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 09:33:25 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 09:33:25 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 09:33:25 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 09:33:25 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:33:25 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:33:25 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:33:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:33:25 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 09:33:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 09:33:25 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:33:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:33:25 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:33:25 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:33:25 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:33:25 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:33:25 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:33:25 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:33:25 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:33:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 09:33:25 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:33:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:33:25 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:33:25 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:33:25 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:33:25 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:33:25 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:33:25 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:33:25 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:33:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 09:33:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:33:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:33:26 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:33:26 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:33:26 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:33:26 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:33:26 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:33:26 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:33:26 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:33:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 09:33:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:33:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:33:26 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 09:33:26 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 09:33:26 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:33:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 09:33:26 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 09:33:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 09:33:26 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:33:26 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:33:26 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:33:26 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:33:26 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:33:26 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:33:26 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 09:33:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:33:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:33:26 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 09:33:26 INFO WorkspaceExplorerServiceImpl:188 - end time - 104 msc 0 sec +2016-04-07 09:34:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 09:34:18 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 09:35:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 09:35:13 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 09:36:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 09:36:08 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 09:37:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 09:37:03 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 09:37:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 09:37:58 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 09:38:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE 
in thread 32 +2016-04-07 09:38:53 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 09:39:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 09:39:48 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 09:40:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 09:40:43 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 09:41:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 09:41:38 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 09:43:23 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 09:43:23 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 09:43:23 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 09:43:23 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 09:43:23 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:43:23 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 09:43:23 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:43:23 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@60b690b8 +2016-04-07 09:43:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 09:43:23 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:43:23 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 09:43:23 DEBUG TemplateModel:83 - 2016-04-07 09:43:23, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:43:23 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:43:23 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 09:43:23 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 139 ms +2016-04-07 09:43:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 09:43:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 09:43:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 09:43:23 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 09:43:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 09:43:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 09:43:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 09:43:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 09:43:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 09:43:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 09:43:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 09:43:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 09:43:23 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 09:43:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 09:43:23 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 09:43:23 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:43:23 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 09:43:23 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@5c161127 +2016-04-07 09:43:23 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@7ef6480b +2016-04-07 09:43:23 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@5deeb73a +2016-04-07 09:43:23 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@1f9a09ad +2016-04-07 09:43:24 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 128 ms +2016-04-07 09:43:24 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 09:43:24 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 09:43:24 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:43:24 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 09:43:24 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:43:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 09:43:24 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:43:24 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 09:43:24 DEBUG TemplateModel:83 - 2016-04-07 09:43:24, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:43:24 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:43:24 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 09:43:24 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 09:43:24 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 09:43:24 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 09:43:24 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 09:43:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 09:43:24 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:43:24 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 09:43:24 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-07 09:43:24 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 09:43:24 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:43:24 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 09:43:24 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 09:43:24 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 09:43:24 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 09:43:24 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:43:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 09:43:24 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 09:43:24 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:43:24 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 09:43:24 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:43:24 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:43:24 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 09:43:24 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 09:43:24 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 09:43:24 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 09:43:24 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 09:43:24 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 09:43:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-07 09:43:24 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:43:24 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 09:43:24 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 86 ms +2016-04-07 09:43:24 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 09:43:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 09:43:24 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:43:24 INFO JCRHomeManager:48 - getUser 
portalLogin: giancarlo.panichi +2016-04-07 09:43:24 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 09:43:24 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:43:24 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 09:43:25 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 09:43:25 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 09:43:25 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 09:43:25 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 09:43:25 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 09:43:25 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 09:43:25 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 09:43:25 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:43:25 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:43:25 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:43:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:43:25 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:43:25 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. 
an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 09:43:25 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 09:43:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 09:43:25 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:43:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:43:25 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:43:25 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:43:25 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:43:25 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:43:25 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:43:25 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:43:25 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:43:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 09:43:25 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:43:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:43:25 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:43:25 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:43:25 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:43:25 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:43:25 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:43:25 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:43:25 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:43:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 09:43:25 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:43:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:43:25 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:43:25 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:43:25 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:43:25 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:43:25 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:43:25 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:43:25 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:43:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 09:43:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:43:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:43:26 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout 
property. Parsing from jar. +2016-04-07 09:43:26 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 09:43:26 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:43:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 09:43:26 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 09:43:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 09:43:26 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:43:26 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:43:26 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:43:26 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:43:26 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:43:26 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:43:26 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 09:43:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:43:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:43:26 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 09:43:26 INFO WorkspaceExplorerServiceImpl:188 - end time - 108 msc 0 sec +2016-04-07 09:43:47 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 09:43:47 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:43:47 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 09:43:47 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:43:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 09:43:47 
INFO ASLSession:352 - Logging the entrance +2016-04-07 09:43:47 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 09:43:47 DEBUG TemplateModel:83 - 2016-04-07 09:43:47, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:43:47 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:43:47 INFO DiscoveryDelegate:77 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
(cached) +2016-04-07 09:43:48 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 09:43:48 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 09:43:48 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 09:43:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 09:43:48 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:43:48 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 09:43:48 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 25 ms +2016-04-07 09:43:48 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 09:43:48 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:43:48 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 09:43:48 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 09:43:48 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 
09:43:48 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:43:48 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 09:43:48 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:43:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 09:43:48 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:43:48 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 09:43:48 DEBUG TemplateModel:83 - 2016-04-07 09:43:48, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:43:48 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:43:48 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 09:43:48 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:43:48 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 09:43:48 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 09:43:48 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 09:43:48 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:43:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 09:43:48 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 09:43:48 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:43:48 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 09:43:48 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:43:48 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:43:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:43:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:43:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:43:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:43:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:43:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:43:49 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 09:43:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 09:43:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:43:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath 
/Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:43:49 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:43:49 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:43:49 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:43:49 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:43:49 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:43:49 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:43:49 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:43:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 09:43:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:43:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:43:49 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:43:49 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:43:49 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:43:49 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:43:49 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:43:49 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:43:49 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:43:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 09:43:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:43:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:43:49 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:43:49 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:43:49 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:43:49 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:43:49 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:43:49 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:43:49 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:43:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 09:43:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:43:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:43:49 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout 
property. Parsing from jar. +2016-04-07 09:43:49 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 09:43:49 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:43:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 09:43:49 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 09:43:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 09:43:49 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:43:49 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:43:49 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:43:49 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:43:49 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:43:49 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:43:49 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 09:43:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:43:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:43:49 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 09:43:49 INFO WorkspaceExplorerServiceImpl:188 - end time - 69 msc 0 sec +2016-04-07 09:44:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 09:44:43 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 09:45:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 09:45:38 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 09:46:33 DEBUG DefaultScopeProvider:38 - setting scope 
/gcube/devsec/devVRE in thread 31 +2016-04-07 09:46:33 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 09:47:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 09:47:28 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 09:48:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 09:48:23 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 09:49:46 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 09:49:46 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 09:49:46 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 09:49:46 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 09:49:46 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:49:46 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 09:49:46 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:49:46 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@65de923d +2016-04-07 09:49:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 09:49:46 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:49:46 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 09:49:46 DEBUG TemplateModel:83 - 2016-04-07 09:49:46, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:49:46 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:49:46 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 09:49:46 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 117 ms +2016-04-07 09:49:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 09:49:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 09:49:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 09:49:46 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 09:49:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 09:49:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 09:49:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 09:49:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 09:49:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 09:49:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 09:49:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 09:49:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 09:49:46 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 09:49:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 09:49:46 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 09:49:46 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:49:46 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 09:49:46 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@3d6cd030 +2016-04-07 09:49:46 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@15253d78 +2016-04-07 09:49:46 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@24297b19 +2016-04-07 09:49:46 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@4783ed7 +2016-04-07 09:49:46 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 115 ms +2016-04-07 09:49:46 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 09:49:46 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:49:46 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 09:49:46 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:49:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 09:49:46 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:49:46 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 09:49:46 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 09:49:46 DEBUG TemplateModel:83 - 2016-04-07 09:49:46, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:49:46 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:49:46 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 09:49:47 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 09:49:47 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 09:49:47 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 09:49:47 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 09:49:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 09:49:47 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:49:47 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 09:49:47 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 19 ms +2016-04-07 09:49:47 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 09:49:47 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:49:47 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 09:49:47 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 09:49:47 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 09:49:47 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 09:49:47 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:49:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 09:49:47 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 09:49:47 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:49:47 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 09:49:47 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:49:47 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:49:47 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 09:49:47 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 09:49:47 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 09:49:47 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 09:49:47 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 09:49:47 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 09:49:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 09:49:47 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:49:47 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 09:49:47 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 113 ms +2016-04-07 09:49:47 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 09:49:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 09:49:47 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:49:47 INFO JCRHomeManager:48 - getUser 
portalLogin: giancarlo.panichi +2016-04-07 09:49:47 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 09:49:47 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:49:47 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 09:49:48 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 09:49:48 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:49:48 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. 
Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 09:49:48 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 09:49:48 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 09:49:48 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 09:49:48 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 09:49:48 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 09:49:48 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 09:49:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:49:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:49:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:49:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:49:48 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 09:49:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 09:49:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:49:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:49:48 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:49:48 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:49:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:49:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:49:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:49:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:49:48 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:49:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 09:49:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:49:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:49:48 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:49:48 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:49:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:49:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:49:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:49:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:49:48 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:49:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 09:49:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:49:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:49:48 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:49:48 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:49:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:49:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:49:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:49:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:49:48 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:49:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 09:49:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:49:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:49:50 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 09:49:50 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 09:49:50 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:49:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 09:49:50 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 09:49:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 09:49:50 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:49:50 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:49:50 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:49:50 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:49:50 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:49:50 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:49:50 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 09:49:50 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:49:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:49:50 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 09:49:50 INFO WorkspaceExplorerServiceImpl:188 - end time - 112 msc 0 sec +2016-04-07 09:50:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 09:50:42 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 09:54:02 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 09:54:02 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 09:54:02 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 09:54:02 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 09:54:02 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:54:02 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 09:54:02 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:54:02 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@73147fba +2016-04-07 09:54:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 09:54:02 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:54:02 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 09:54:02 DEBUG TemplateModel:83 - 2016-04-07 09:54:02, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:54:02 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:54:02 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 09:54:02 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 126 ms +2016-04-07 09:54:02 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 09:54:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 09:54:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 09:54:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 09:54:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 09:54:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 09:54:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 09:54:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 09:54:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 09:54:02 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 09:54:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 09:54:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 09:54:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 09:54:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 09:54:02 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 09:54:03 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:54:03 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 09:54:03 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:54:03 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 09:54:03 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:54:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 09:54:03 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:54:03 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 09:54:03 DEBUG TemplateModel:83 - 2016-04-07 09:54:03, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:54:03 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:54:03 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 09:54:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 09:54:03 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@3b476889 +2016-04-07 09:54:03 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@5048ede0 +2016-04-07 09:54:03 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@1831b9aa +2016-04-07 09:54:03 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@1c665515 +2016-04-07 09:54:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in 
$resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 277 ms +2016-04-07 09:54:03 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 09:54:03 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 09:54:03 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 09:54:03 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:54:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 09:54:03 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 09:54:03 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:54:03 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 09:54:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:54:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:54:03 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 09:54:03 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 09:54:03 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 09:54:03 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 09:54:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 09:54:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:54:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 09:54:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 23 ms +2016-04-07 09:54:03 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 09:54:03 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:54:03 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 09:54:03 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 09:54:04 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 09:54:04 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 09:54:04 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 09:54:04 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 09:54:04 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 09:54:04 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 09:54:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-07 09:54:04 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:54:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 09:54:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where 
($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 163 ms +2016-04-07 09:54:04 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 09:54:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 09:54:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:54:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:54:04 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 09:54:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:54:04 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 09:54:04 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 09:54:05 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:54:05 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 09:54:05 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 09:54:05 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 09:54:05 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 09:54:05 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 09:54:05 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 09:54:05 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 09:54:05 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:54:05 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:54:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:54:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:54:05 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 09:54:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 09:54:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:54:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:54:05 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:54:05 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:54:05 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:54:05 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:54:05 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:54:05 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:54:05 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:54:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 09:54:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:54:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:54:05 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:54:05 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:54:05 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:54:05 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:54:05 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:54:05 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:54:05 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:54:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 09:54:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:54:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:54:05 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:54:05 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:54:05 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:54:05 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:54:05 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:54:05 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:54:05 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:54:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 09:54:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:54:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:54:06 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 09:54:06 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 09:54:06 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:54:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 09:54:06 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 09:54:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 09:54:06 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:54:06 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:54:06 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:54:06 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:54:06 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:54:06 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:54:06 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 09:54:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:54:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:54:06 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 09:54:06 INFO WorkspaceExplorerServiceImpl:188 - end time - 169 msc 0 sec +2016-04-07 09:54:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 09:54:58 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 09:55:36 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 09:55:36 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:55:36 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 09:55:36 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:55:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 09:55:36 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:55:36 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 09:55:36 DEBUG TemplateModel:83 - 2016-04-07 09:55:36, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:55:36 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:55:36 INFO DiscoveryDelegate:77 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
(cached) +2016-04-07 09:55:36 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 09:55:36 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 09:55:36 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 09:55:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 09:55:36 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:55:36 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 09:55:36 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 94 ms +2016-04-07 09:55:36 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 09:55:36 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:55:36 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 09:55:36 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 09:55:36 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 
09:55:36 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:55:36 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 09:55:36 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:55:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 09:55:36 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:55:36 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 09:55:36 DEBUG TemplateModel:83 - 2016-04-07 09:55:36, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:55:36 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:55:36 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 09:55:36 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:55:36 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 09:55:37 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 09:55:37 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 09:55:37 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:55:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 09:55:37 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 09:55:37 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:55:37 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 09:55:37 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:55:37 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:55:37 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:55:37 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:55:37 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:55:37 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:55:37 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:55:37 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:55:37 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 09:55:37 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 09:55:37 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:55:37 INFO JCRServlets:238 - Calling Servlet GetItemByPath 
/Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:55:37 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:55:37 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:55:37 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:55:37 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:55:37 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:55:37 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:55:37 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:55:37 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 09:55:37 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:55:37 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:55:37 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:55:37 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:55:37 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:55:37 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:55:37 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:55:37 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:55:37 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:55:37 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 09:55:37 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:55:37 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:55:37 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:55:37 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:55:37 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:55:37 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:55:37 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:55:37 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:55:37 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:55:37 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 09:55:37 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:55:37 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:55:38 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout 
property. Parsing from jar. +2016-04-07 09:55:38 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 09:55:38 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:55:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 09:55:38 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 09:55:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 09:55:38 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:55:38 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:55:38 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:55:38 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:55:38 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:55:38 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:55:38 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 09:55:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:55:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:55:38 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 09:55:38 INFO WorkspaceExplorerServiceImpl:188 - end time - 78 msc 0 sec +2016-04-07 09:56:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 09:56:31 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 09:57:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 09:57:26 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 09:58:10 DEBUG AccessLogger:124 - Creating a message handling object in 
order to handle the message queue +2016-04-07 09:58:10 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 09:58:10 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 09:58:10 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 09:58:10 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:58:10 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 09:58:10 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:58:10 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@18b705b +2016-04-07 09:58:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 09:58:10 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:58:10 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 09:58:10 DEBUG TemplateModel:83 - 2016-04-07 09:58:10, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:58:10 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:58:10 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-07 09:58:10 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 112 ms +2016-04-07 09:58:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 09:58:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 09:58:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 09:58:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 09:58:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 09:58:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 09:58:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 09:58:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 09:58:10 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 09:58:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 09:58:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 09:58:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 09:58:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 09:58:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 09:58:10 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 09:58:10 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:58:10 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 
'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 09:58:10 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@1b5b6e7e +2016-04-07 09:58:10 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@1c8442b0 +2016-04-07 09:58:10 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@483a4f5a +2016-04-07 09:58:10 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@5b52b13c +2016-04-07 09:58:10 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 104 ms +2016-04-07 09:58:10 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 09:58:10 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 09:58:10 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 09:58:10 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 09:58:10 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 09:58:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 09:58:10 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:58:10 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 09:58:10 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 35 ms +2016-04-07 09:58:10 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 09:58:10 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:58:10 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 09:58:10 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 09:58:11 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 09:58:11 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:58:11 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 09:58:11 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:58:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 09:58:11 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:58:11 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 09:58:11 DEBUG TemplateModel:83 - 2016-04-07 09:58:11, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:58:11 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:58:11 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 09:58:11 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 09:58:11 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 09:58:11 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:58:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 09:58:11 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 09:58:11 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:58:11 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 09:58:11 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:58:11 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:58:11 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 09:58:11 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 09:58:11 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 09:58:11 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 09:58:11 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 09:58:11 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 09:58:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 09:58:11 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:58:11 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 09:58:11 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 74 ms +2016-04-07 09:58:11 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 09:58:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 09:58:11 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:58:11 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:58:11 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 09:58:11 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:58:11 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 09:58:12 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:58:12 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 09:58:12 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 09:58:12 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 09:58:12 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 09:58:12 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 09:58:12 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 09:58:12 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 09:58:12 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 09:58:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:58:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:58:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:58:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:58:12 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 09:58:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 09:58:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:58:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:58:12 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:58:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:58:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:58:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:58:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:58:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:58:12 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:58:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 09:58:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:58:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:58:12 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:58:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:58:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:58:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:58:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:58:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:58:12 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:58:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 09:58:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:58:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:58:12 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:58:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:58:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:58:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:58:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:58:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:58:12 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:58:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 09:58:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:58:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:58:13 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 09:58:13 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 09:58:13 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:58:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 09:58:13 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 09:58:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 09:58:13 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:58:13 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:58:13 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:58:13 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:58:13 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:58:13 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:58:13 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 09:58:13 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:58:13 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:58:13 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 09:58:13 INFO WorkspaceExplorerServiceImpl:188 - end time - 137 msc 0 sec +2016-04-07 09:58:50 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 09:58:50 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:58:50 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 09:58:50 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:58:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 09:58:50 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:58:50 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 09:58:50 DEBUG TemplateModel:83 - 2016-04-07 09:58:50, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:58:50 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:58:50 INFO DiscoveryDelegate:77 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
(cached) +2016-04-07 09:58:50 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 09:58:50 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 09:58:50 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 09:58:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 09:58:50 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 09:58:50 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 09:58:50 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 55 ms +2016-04-07 09:58:50 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 09:58:50 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:58:50 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 09:58:50 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 09:58:51 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 
09:58:51 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 09:58:51 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 09:58:51 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 09:58:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 09:58:51 INFO ASLSession:352 - Logging the entrance +2016-04-07 09:58:51 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 09:58:51 DEBUG TemplateModel:83 - 2016-04-07 09:58:51, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 09:58:51 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:58:51 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 09:58:51 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 09:58:51 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 09:58:51 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 09:58:51 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 09:58:51 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:58:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 09:58:51 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 09:58:51 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 09:58:51 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 09:58:51 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:58:51 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:58:51 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:58:51 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:58:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:58:51 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:58:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:58:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:58:51 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 09:58:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 09:58:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:58:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath 
/Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:58:51 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:58:51 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:58:51 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:58:51 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:58:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:58:51 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:58:51 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:58:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 09:58:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:58:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:58:51 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:58:51 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:58:51 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:58:51 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:58:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:58:51 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:58:51 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:58:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 09:58:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:58:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:58:52 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:58:52 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:58:52 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:58:52 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:58:52 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:58:52 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:58:52 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 09:58:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 09:58:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:58:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:58:52 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout 
property. Parsing from jar. +2016-04-07 09:58:52 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 09:58:52 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 09:58:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 09:58:52 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 09:58:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 09:58:52 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 09:58:52 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 09:58:52 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 09:58:52 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 09:58:52 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 09:58:52 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 09:58:52 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 09:58:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 09:58:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 09:58:52 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 09:58:52 INFO WorkspaceExplorerServiceImpl:188 - end time - 70 msc 0 sec +2016-04-07 09:59:59 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 09:59:59 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 09:59:59 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 10:00:00 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 10:00:00 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 10:00:00 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 10:00:00 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 10:00:00 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@57a14172 +2016-04-07 10:00:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 10:00:00 INFO ASLSession:352 - Logging the entrance +2016-04-07 10:00:00 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 10:00:00 DEBUG TemplateModel:83 - 2016-04-07 10:00:00, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 10:00:00 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 10:00:00 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 10:00:00 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 110 ms +2016-04-07 10:00:00 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 10:00:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 10:00:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 10:00:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 10:00:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 10:00:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 10:00:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 10:00:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 10:00:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 10:00:00 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 10:00:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 10:00:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 10:00:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 10:00:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 10:00:00 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 10:00:00 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 10:00:00 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 10:00:00 INFO 
HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@21b80c65 +2016-04-07 10:00:00 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@7c59ed9 +2016-04-07 10:00:00 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@1633da69 +2016-04-07 10:00:00 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@39feeba2 +2016-04-07 10:00:00 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 99 ms +2016-04-07 10:00:00 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 10:00:00 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 10:00:00 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 10:00:00 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 10:00:00 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 10:00:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 10:00:00 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 10:00:00 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 10:00:00 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 28 ms +2016-04-07 10:00:00 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 10:00:00 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 10:00:00 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 10:00:00 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 10:00:01 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 10:00:01 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 10:00:01 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 10:00:01 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 10:00:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 10:00:01 INFO ASLSession:352 - Logging the entrance +2016-04-07 10:00:01 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 10:00:01 DEBUG TemplateModel:83 - 2016-04-07 10:00:01, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 10:00:01 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 10:00:01 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 10:00:01 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 10:00:01 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 10:00:01 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 10:00:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 10:00:01 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 10:00:01 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 10:00:01 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 10:00:01 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 10:00:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 10:00:01 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 10:00:01 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 10:00:01 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 10:00:01 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 10:00:01 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 10:00:01 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 10:00:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-07 10:00:01 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 10:00:01 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 10:00:01 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 98 ms +2016-04-07 10:00:01 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 10:00:01 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). 
A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 10:00:01 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 10:00:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 10:00:01 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 10:00:01 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 10:00:01 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 10:00:01 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 10:00:01 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 10:00:03 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 10:00:03 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 10:00:03 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 10:00:03 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 10:00:03 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 10:00:03 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 10:00:03 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 10:00:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 10:00:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 10:00:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:00:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:00:03 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 10:00:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi 
+2016-04-07 10:00:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:00:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:00:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 10:00:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 10:00:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 10:00:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 10:00:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 10:00:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 10:00:03 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 10:00:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 10:00:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:00:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:00:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 10:00:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 10:00:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 10:00:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 10:00:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 10:00:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 10:00:03 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 10:00:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 10:00:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:00:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:00:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 10:00:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 10:00:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 10:00:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 10:00:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 10:00:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 10:00:03 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 10:00:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 10:00:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:00:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:00:04 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout 
property. Parsing from jar. +2016-04-07 10:00:04 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 10:00:04 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 10:00:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 10:00:04 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 10:00:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 10:00:04 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 10:00:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 10:00:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 10:00:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 10:00:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 10:00:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 10:00:04 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 10:00:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:00:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:00:04 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 10:00:04 INFO WorkspaceExplorerServiceImpl:188 - end time - 118 msc 0 sec +2016-04-07 10:00:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 10:00:56 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 10:04:53 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 10:04:53 DEBUG AccessLogger:44 - Constructing a new access logger. 
Create a new file if it does not exist for the current date +2016-04-07 10:04:53 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 10:04:53 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 10:04:53 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 10:04:53 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 10:04:53 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 10:04:53 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@4df205c +2016-04-07 10:04:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 10:04:53 INFO ASLSession:352 - Logging the entrance +2016-04-07 10:04:53 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 10:04:53 DEBUG TemplateModel:83 - 2016-04-07 10:04:53, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 10:04:53 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 10:04:53 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 10:04:54 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 122 ms +2016-04-07 10:04:54 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 10:04:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 10:04:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 10:04:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 10:04:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 10:04:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 10:04:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 10:04:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 10:04:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 10:04:54 INFO 
ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 10:04:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 10:04:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 10:04:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 10:04:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 10:04:54 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 10:04:54 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 10:04:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-07 10:04:54 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@6ac2840c +2016-04-07 10:04:54 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@360b8b2f +2016-04-07 10:04:54 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@25459524 +2016-04-07 10:04:54 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@4ee4afad +2016-04-07 10:04:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 154 ms +2016-04-07 10:04:55 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 10:04:55 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 10:04:55 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 10:04:55 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 10:04:55 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 10:04:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 10:04:55 INFO ASLSession:352 - Logging the entrance +2016-04-07 10:04:55 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 10:04:55 DEBUG TemplateModel:83 - 2016-04-07 10:04:55, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 10:04:55 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 10:04:55 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 10:04:55 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 10:04:55 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 10:04:55 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 10:04:55 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 10:04:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 10:04:55 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 10:04:55 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 10:04:55 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-07 10:04:55 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 10:04:55 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 10:04:55 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 10:04:55 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 10:04:55 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 10:04:55 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 10:04:55 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 10:04:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 10:04:55 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 10:04:55 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 10:04:55 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 10:04:55 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 10:04:55 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 10:04:55 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 10:04:55 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 10:04:55 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 10:04:55 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 10:04:55 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 10:04:55 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 10:04:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-07 10:04:55 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 10:04:55 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 10:04:55 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 45 ms +2016-04-07 10:04:56 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 10:04:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 10:04:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 10:04:56 INFO JCRHomeManager:48 - getUser 
portalLogin: giancarlo.panichi +2016-04-07 10:04:56 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 10:04:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 10:04:56 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 10:04:56 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 10:04:56 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. 
Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 10:04:56 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 10:04:57 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 10:04:57 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 10:04:57 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 10:04:57 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 10:04:57 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 10:04:57 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 10:04:57 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 10:04:57 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 10:04:57 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:04:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:04:57 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 10:04:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 10:04:57 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:04:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:04:57 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 10:04:57 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 10:04:57 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 10:04:57 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 10:04:57 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 10:04:57 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 10:04:57 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 10:04:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 10:04:57 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:04:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:04:57 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 10:04:57 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 10:04:57 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 10:04:57 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 10:04:57 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 10:04:57 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 10:04:57 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 10:04:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 10:04:57 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:04:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:04:57 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 10:04:57 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 10:04:57 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 10:04:57 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 10:04:57 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 10:04:57 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 10:04:57 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 10:04:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 10:04:57 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:04:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:04:58 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 10:04:58 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 10:04:58 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 10:04:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 10:04:58 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 10:04:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 10:04:58 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 10:04:58 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 10:04:58 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 10:04:58 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 10:04:58 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 10:04:58 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 10:04:58 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 10:04:58 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:04:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:04:58 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 10:04:58 INFO WorkspaceExplorerServiceImpl:188 - end time - 162 msc 0 sec +2016-04-07 10:05:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 10:05:50 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 10:06:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 10:06:45 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 10:10:23 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 10:10:23 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 10:10:23 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 10:10:23 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 10:10:23 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 10:10:23 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 10:10:23 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 10:10:23 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@64a107ee +2016-04-07 10:10:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 10:10:23 INFO ASLSession:352 - Logging the entrance +2016-04-07 10:10:23 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 10:10:23 DEBUG TemplateModel:83 - 2016-04-07 10:10:23, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 10:10:23 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 10:10:23 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 10:10:24 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 122 ms +2016-04-07 10:10:24 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 10:10:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 10:10:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 10:10:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 10:10:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 10:10:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 10:10:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 10:10:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 10:10:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 10:10:24 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 10:10:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 10:10:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 10:10:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 10:10:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 10:10:24 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 10:10:24 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 10:10:24 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 10:10:25 INFO SessionUtil:49 
- no user found in session, use test user +2016-04-07 10:10:25 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 10:10:25 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 10:10:25 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 10:10:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 10:10:25 INFO ASLSession:352 - Logging the entrance +2016-04-07 10:10:25 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 10:10:25 DEBUG TemplateModel:83 - 2016-04-07 10:10:25, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 10:10:25 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 10:10:25 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 10:10:25 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@67e85aa3 +2016-04-07 10:10:25 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@2c6af0ae +2016-04-07 10:10:25 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@6c62fec7 +2016-04-07 10:10:25 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@47b875ea +2016-04-07 10:10:25 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where 
($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 553 ms +2016-04-07 10:10:25 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 10:10:25 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 10:10:25 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 10:10:25 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 10:10:25 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 10:10:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 10:10:25 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 10:10:25 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 10:10:25 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 24 ms +2016-04-07 10:10:25 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 10:10:25 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 10:10:25 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 10:10:25 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 10:10:25 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 10:10:25 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 10:10:25 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 10:10:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 10:10:25 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 10:10:25 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 10:10:25 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 10:10:25 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 10:10:25 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 10:10:26 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 10:10:26 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 10:10:26 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 10:10:26 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 10:10:26 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 10:10:26 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 10:10:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 10:10:26 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 10:10:26 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 10:10:26 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 74 ms +2016-04-07 10:10:26 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 10:10:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 10:10:26 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 10:10:26 INFO JCRHomeManager:48 - getUser 
portalLogin: giancarlo.panichi +2016-04-07 10:10:26 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 10:10:26 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 10:10:26 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 10:10:26 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 10:10:26 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. 
Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 10:10:27 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 10:10:27 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 10:10:27 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 10:10:27 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 10:10:27 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 10:10:27 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 10:10:27 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 10:10:27 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 10:10:27 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 10:10:27 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:10:27 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:10:27 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 10:10:27 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 10:10:27 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:10:27 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:10:28 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 10:10:28 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 10:10:28 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 10:10:28 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 10:10:28 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 10:10:28 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 10:10:28 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 10:10:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 10:10:28 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:10:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:10:28 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 10:10:28 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 10:10:28 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 10:10:28 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 10:10:28 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 10:10:28 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 10:10:28 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 10:10:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 10:10:29 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:10:29 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:10:29 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 10:10:29 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 10:10:29 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 10:10:29 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 10:10:29 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 10:10:29 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 10:10:29 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 10:10:29 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 10:10:29 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:10:29 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:10:29 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 10:10:29 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 10:10:29 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 10:10:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 10:10:29 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 10:10:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 10:10:29 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 10:10:29 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 10:10:29 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 10:10:29 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 10:10:29 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 10:10:29 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 10:10:29 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 10:10:29 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:10:29 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:10:29 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 10:10:29 INFO WorkspaceExplorerServiceImpl:188 - end time - 171 msc 0 sec +2016-04-07 10:11:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 10:11:20 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 10:12:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 10:12:01 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 10:12:01 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 10:12:01 INFO SClient4WPS:643 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS +2016-04-07 10:12:01 DEBUG SClient4WPS:276 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 10:12:01 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 10:12:01 DEBUG SClient4WPS:297 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS + XMEANS + A clustering algorithm for occurrence points 
that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + + + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + + + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 10:12:01 DEBUG SClient4WPS:301 - WPSClient->Fetching Inputs +2016-04-07 10:12:01 DEBUG SClient4WPS:303 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 10:12:01 DEBUG SClient4WPS:303 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 10:12:01 DEBUG SClient4WPS:303 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 10:12:01 DEBUG SClient4WPS:303 - WPSClient->Input: + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. 
XMeans max number of overall iterations of the clustering learning + + + + 10 + + +2016-04-07 10:12:01 DEBUG SClient4WPS:303 - WPSClient->Input: + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + +2016-04-07 10:12:01 DEBUG SClient4WPS:303 - WPSClient->Input: + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + +2016-04-07 10:12:01 DEBUG SClient4WPS:303 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-07 10:12:01 DEBUG SClient4WPS:308 - WPSClient->Fetching Outputs +2016-04-07 10:12:01 DEBUG SClient4WPS:310 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 10:12:01 DEBUG SClient4WPS:310 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 10:12:01 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 10:12:01 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 10:12:01 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 10:12:01 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 10:12:01 DEBUG WPS2SM:201 - Schema: null +2016-04-07 10:12:01 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 10:12:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 10:12:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 10:12:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 10:12:01 DEBUG SClient4WPS:658 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-07 10:12:01 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 10:12:01 DEBUG WPS2SM:93 - WPS type: +2016-04-07 10:12:01 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 10:12:01 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 10:12:01 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-07 10:12:01 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 10:12:01 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 10:12:01 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 10:12:01 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 10:12:01 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 10:12:01 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 10:12:01 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 10:12:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 10:12:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 10:12:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 10:12:01 DEBUG SClient4WPS:658 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 10:12:01 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 10:12:01 DEBUG WPS2SM:93 - WPS type: +2016-04-07 10:12:01 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 10:12:01 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 10:12:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 10:12:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 10:12:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 10:12:01 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 10:12:01 DEBUG WPS2SM:254 - Conversion to SM Type->maxIterations is a Literal Input +2016-04-07 10:12:01 DEBUG WPS2SM:93 - WPS type: +2016-04-07 10:12:01 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 10:12:01 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 10:12:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:XMeans max number of overall iterations of the clustering learning +2016-04-07 10:12:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxIterations +2016-04-07 10:12:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 10:12:01 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-07 10:12:01 DEBUG WPS2SM:254 - Conversion to SM Type->minClusters is a Literal Input +2016-04-07 10:12:01 DEBUG WPS2SM:93 - WPS type: +2016-04-07 10:12:01 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 10:12:01 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 10:12:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:minimum number of expected clusters +2016-04-07 10:12:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minClusters +2016-04-07 10:12:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 10:12:01 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 10:12:01 DEBUG WPS2SM:254 - Conversion to SM Type->maxClusters is a Literal Input +2016-04-07 10:12:01 DEBUG WPS2SM:93 - WPS type: +2016-04-07 10:12:01 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 10:12:01 DEBUG WPS2SM:101 - Guessed default value: 50 +2016-04-07 10:12:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of clusters to produce +2016-04-07 10:12:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxClusters +2016-04-07 10:12:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 10:12:01 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. 
of Entries:1; default:50], typology=OBJECT] +2016-04-07 10:12:01 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 10:12:01 DEBUG WPS2SM:93 - WPS type: +2016-04-07 10:12:01 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 10:12:01 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-07 10:12:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-07 10:12:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 10:12:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 10:12:01 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-07 10:12:01 DEBUG SClient4WPS:662 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. of Entries:1; default:50], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-07 10:12:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 10:12:02 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 10:12:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 10:12:02 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 10:12:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 10:12:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 10:12:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 10:12:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 10:12:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 10:12:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 10:12:02 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 10:12:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 10:12:02 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 10:12:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:12:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 10:12:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 10:12:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 10:12:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 10:12:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 10:12:02 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 10:12:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 10:12:02 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 10:12:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 10:12:02 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 10:12:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 10:12:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 10:12:02 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 10:12:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 10:12:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 10:12:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 10:12:02 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 10:12:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 10:12:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 10:12:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 10:12:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 10:12:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:12:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 10:12:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 10:12:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 10:12:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:12:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 10:12:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:12:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:12:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:12:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:12:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 10:12:02 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 10:12:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 10:12:02 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 10:12:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:12:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 10:12:02 INFO JCRWorkspace:315 - Getting 
Workspace of user: giancarlo.panichi +2016-04-07 10:12:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:12:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:12:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:12:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 10:12:02 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 10:12:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-07 10:12:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 10:12:02 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 10:12:02 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 10:12:02 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 54 ms +2016-04-07 10:12:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-07 10:12:02 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 10:12:02 INFO 
ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-07 10:12:02 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 67 ms +2016-04-07 10:12:02 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-07 10:12:02 INFO ISClientConnector:82 - found only one RR, take it +2016-04-07 10:12:02 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-07 10:12:02 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-07 10:12:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-07 10:12:02 DEBUG StorageClient:517 - set scope: /gcube +2016-04-07 10:12:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-07 10:12:02 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 10:12:02 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 10:12:02 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-07 10:12:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-07 10:12:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-07 10:12:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-07 10:12:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 10:12:02 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 10:12:02 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 10:12:02 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 10:12:02 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 10:12:02 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 10:12:02 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 10:12:02 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 10:12:02 INFO WorkspaceExplorerServiceImpl:142 - end time - 455 msc 0 sec +2016-04-07 10:12:02 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 10:12:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 10:12:15 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 10:13:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 10:13:10 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 10:14:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 10:14:05 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 10:15:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 10:15:00 DEBUG 
ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 10:15:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 10:15:55 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 10:16:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 10:16:50 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 10:17:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 10:17:45 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 10:18:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 10:18:40 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 10:19:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 10:19:35 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 10:20:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 10:20:30 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 10:21:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 10:21:25 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 10:22:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 10:22:20 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 10:23:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 10:23:15 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 10:24:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 10:24:10 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 10:25:05 DEBUG DefaultScopeProvider:38 - setting scope 
/gcube/devsec/devVRE in thread 30 +2016-04-07 10:25:05 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 10:26:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 10:26:00 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 10:26:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 10:26:55 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 10:27:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 10:27:50 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 10:28:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 10:28:45 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 10:29:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 10:29:40 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 10:30:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 10:30:35 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 10:31:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 10:31:30 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 10:32:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 10:32:25 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 10:33:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 10:33:20 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 10:34:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 10:34:15 DEBUG ASLSession:458 - Getting security token: null in thread 34 
+2016-04-07 10:35:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 10:35:10 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 10:36:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 10:36:05 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 10:37:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 10:37:00 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 10:37:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 10:37:55 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 10:38:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 10:38:50 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 10:39:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 10:39:45 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 10:40:40 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 10:40:40 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 10:40:40 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 10:40:40 WARN SessionCheckerServiceImpl:80 - Scope is null at Thu Apr 07 10:40:40 CEST 2016 +2016-04-07 10:40:40 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:14:16 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 11:14:16 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 11:14:16 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 11:14:16 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:14:16 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:14:16 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 11:14:16 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:14:16 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@6fc884d7 +2016-04-07 11:14:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 11:14:16 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:14:16 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:14:16 DEBUG TemplateModel:83 - 2016-04-07 11:14:16, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:14:16 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:14:16 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:14:16 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 172 ms +2016-04-07 11:14:16 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 11:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 11:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 11:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 11:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 11:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 11:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 11:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 11:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 11:14:16 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 11:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 11:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 11:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 11:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 11:14:17 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 11:14:17 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:14:17 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:14:17 INFO SessionUtil:49 
- no user found in session, use test user +2016-04-07 11:14:17 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:14:17 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 11:14:17 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:14:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 11:14:17 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:14:17 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:14:17 DEBUG TemplateModel:83 - 2016-04-07 11:14:17, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:14:17 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:14:17 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 11:14:17 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@9f56fe7 +2016-04-07 11:14:17 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@948eec4 +2016-04-07 11:14:17 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@8e894b0 +2016-04-07 11:14:17 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@e925e7f +2016-04-07 11:14:18 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where 
($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 461 ms +2016-04-07 11:14:18 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 11:14:18 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:14:18 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 11:14:18 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:14:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 11:14:18 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 11:14:18 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:14:18 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 11:14:18 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 11:14:18 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 11:14:18 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 11:14:18 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 11:14:18 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:14:18 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:14:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 11:14:18 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:14:18 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 11:14:18 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 20 ms +2016-04-07 11:14:18 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 11:14:18 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:14:18 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 11:14:18 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 11:14:18 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 11:14:18 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 11:14:18 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 11:14:18 DEBUG JCRHomeManagerFactory:44 
- Initialize content manager +2016-04-07 11:14:18 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 11:14:18 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 11:14:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-07 11:14:18 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:14:18 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 11:14:18 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 78 ms +2016-04-07 11:14:19 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 11:14:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 11:14:19 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:14:19 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:14:19 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. 
+2016-04-07 11:14:19 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:14:19 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 11:14:19 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:14:19 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:14:19 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 11:14:19 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:14:19 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:14:19 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 11:14:19 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 11:14:19 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:14:19 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:14:19 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:14:19 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:14:19 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:14:19 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. 
an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 11:14:20 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 11:14:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 11:14:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:14:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:14:20 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:14:20 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:14:20 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:14:20 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:14:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:14:20 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:14:20 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:14:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 11:14:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:14:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:14:20 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:14:20 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:14:20 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:14:20 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:14:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:14:20 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:14:20 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:14:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 11:14:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:14:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:14:20 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:14:20 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:14:20 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:14:20 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:14:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:14:20 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:14:20 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:14:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 11:14:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:14:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:14:21 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout 
property. Parsing from jar. +2016-04-07 11:14:21 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 11:14:21 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:14:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:14:21 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 11:14:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:14:21 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:14:21 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:14:21 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:14:21 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:14:21 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:14:21 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:14:21 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 11:14:21 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:14:21 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:14:21 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 11:14:21 INFO WorkspaceExplorerServiceImpl:188 - end time - 166 msc 0 sec +2016-04-07 11:15:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 11:15:12 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 11:16:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:16:07 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 11:17:22 DEBUG AccessLogger:124 - Creating a message handling object 
in order to handle the message queue +2016-04-07 11:17:22 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 11:17:22 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 11:17:22 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:17:22 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:17:22 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 11:17:22 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:17:22 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@2a725ecd +2016-04-07 11:17:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 11:17:22 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:17:22 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:17:22 DEBUG TemplateModel:83 - 2016-04-07 11:17:22, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:17:22 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:17:22 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return 
$entry/text() +2016-04-07 11:17:22 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 111 ms +2016-04-07 11:17:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 11:17:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 11:17:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 11:17:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 11:17:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 11:17:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 11:17:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 11:17:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 11:17:22 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 11:17:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 11:17:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 11:17:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 11:17:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 11:17:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 11:17:22 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 11:17:22 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:17:22 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 
'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:17:22 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@56ccb9cd +2016-04-07 11:17:22 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@61dce966 +2016-04-07 11:17:22 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@58174270 +2016-04-07 11:17:22 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@5a9e397a +2016-04-07 11:17:22 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 101 ms +2016-04-07 11:17:22 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 11:17:23 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 11:17:23 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 11:17:23 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 11:17:23 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 11:17:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 11:17:23 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:17:23 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 11:17:23 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-07 11:17:23 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 11:17:23 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:17:23 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 11:17:23 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 11:17:23 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:17:23 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:17:23 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 11:17:23 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:17:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:17:23 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:17:23 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:17:23 DEBUG TemplateModel:83 - 2016-04-07 11:17:23, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:17:23 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:17:23 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 11:17:23 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:17:23 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 11:17:23 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:17:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 11:17:23 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 11:17:23 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:17:23 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 11:17:23 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:17:23 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:17:23 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 11:17:23 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 11:17:23 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 11:17:23 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 11:17:23 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 11:17:23 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 11:17:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-07 11:17:23 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:17:23 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 11:17:23 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 79 ms +2016-04-07 11:17:24 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 11:17:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 11:17:24 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:17:24 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:17:24 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 11:17:24 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:17:24 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 11:17:24 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:17:24 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 11:17:24 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:17:24 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:17:24 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 11:17:24 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:17:24 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:17:24 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 11:17:24 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 11:17:24 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:17:24 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:17:24 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:17:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:17:24 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 11:17:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 11:17:24 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:17:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:17:25 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:17:25 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:17:25 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:17:25 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:17:25 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:17:25 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:17:25 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:17:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 11:17:25 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:17:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:17:25 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:17:25 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:17:25 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:17:25 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:17:25 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:17:25 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:17:25 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:17:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 11:17:25 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:17:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:17:25 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:17:25 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:17:25 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:17:25 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:17:25 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:17:25 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:17:25 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:17:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 11:17:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:17:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:17:26 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:17:26 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 11:17:26 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:17:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 11:17:26 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 11:17:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 11:17:26 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:17:26 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:17:26 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:17:26 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:17:26 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:17:26 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:17:26 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 11:17:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:17:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:17:26 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 11:17:26 INFO WorkspaceExplorerServiceImpl:188 - end time - 104 msc 0 sec +2016-04-07 11:18:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 11:18:18 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 11:19:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:19:13 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 11:20:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:20:08 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 11:21:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:21:03 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 11:21:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:21:58 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 11:22:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE 
in thread 34 +2016-04-07 11:22:53 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 11:23:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 11:23:48 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 11:24:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 11:24:43 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 11:26:07 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 11:26:07 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 11:26:07 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 11:26:07 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:26:07 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:26:07 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 11:26:07 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:26:07 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@4f6647c +2016-04-07 11:26:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 11:26:07 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:26:07 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 11:26:07 DEBUG TemplateModel:83 - 2016-04-07 11:26:07, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:26:07 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:26:07 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:26:07 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 109 ms +2016-04-07 11:26:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 11:26:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 11:26:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 11:26:07 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 11:26:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 11:26:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 11:26:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 11:26:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 11:26:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 11:26:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 11:26:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 11:26:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 11:26:07 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 11:26:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 11:26:07 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 11:26:07 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:26:07 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:26:07 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@3b0953a6 +2016-04-07 11:26:07 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@5ea312a1 +2016-04-07 11:26:07 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@21effddd +2016-04-07 11:26:07 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@6638819d +2016-04-07 11:26:07 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 126 ms +2016-04-07 11:26:08 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 11:26:08 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 11:26:08 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 11:26:08 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 11:26:08 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 11:26:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 11:26:08 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:26:08 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 11:26:08 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 30 ms +2016-04-07 11:26:08 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 11:26:08 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:26:08 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 11:26:08 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 11:26:08 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:26:08 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:26:08 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 11:26:08 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:26:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 11:26:08 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:26:08 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:26:08 DEBUG TemplateModel:83 - 2016-04-07 11:26:08, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:26:08 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:26:08 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 11:26:08 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:26:08 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 11:26:08 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:26:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:26:08 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 11:26:08 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:26:08 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 11:26:08 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:26:08 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:26:08 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 11:26:08 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 11:26:08 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 11:26:08 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 11:26:08 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 11:26:08 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 11:26:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-07 11:26:08 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:26:08 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 11:26:08 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 69 ms +2016-04-07 11:26:09 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 11:26:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:26:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:26:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:26:09 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 11:26:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:26:09 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 11:26:09 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:26:09 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 11:26:09 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:26:09 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:26:09 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 11:26:09 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:26:09 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:26:09 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 11:26:09 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 11:26:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:26:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:26:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:26:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:26:09 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 11:26:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 11:26:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:26:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:26:09 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:26:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:26:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:26:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:26:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:26:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:26:09 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:26:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 11:26:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:26:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:26:10 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:26:10 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:26:10 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:26:10 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:26:10 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:26:10 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:26:10 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:26:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 11:26:10 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:26:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:26:10 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:26:10 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:26:10 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:26:10 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:26:10 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:26:10 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:26:10 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:26:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 11:26:10 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:26:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:26:10 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:26:10 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 11:26:10 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:26:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 11:26:10 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 11:26:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 11:26:10 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:26:10 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:26:10 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:26:10 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:26:10 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:26:10 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:26:10 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 11:26:10 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:26:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:26:10 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 11:26:10 INFO WorkspaceExplorerServiceImpl:188 - end time - 122 msc 0 sec +2016-04-07 11:27:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:27:03 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 11:27:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:27:58 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 11:28:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 11:28:53 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 11:29:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 11:29:48 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 11:30:39 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 11:30:39 DEBUG AccessLogger:44 - Constructing a new access logger. 
Create a new file if it does not exist for the current date +2016-04-07 11:30:39 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 11:30:39 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:30:39 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:30:39 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 11:30:39 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:30:39 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@3d77405e +2016-04-07 11:30:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 11:30:39 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:30:39 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:30:39 DEBUG TemplateModel:83 - 2016-04-07 11:30:39, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:30:39 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:30:40 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:30:40 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 137 ms +2016-04-07 11:30:40 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 11:30:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 11:30:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 11:30:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 11:30:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 11:30:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 11:30:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 11:30:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 11:30:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 11:30:40 INFO 
ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 11:30:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 11:30:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 11:30:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 11:30:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 11:30:40 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 11:30:40 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:30:40 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-07 11:30:40 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@2fc5b266 +2016-04-07 11:30:40 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@553f4370 +2016-04-07 11:30:40 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@3483154e +2016-04-07 11:30:40 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@4569aa88 +2016-04-07 11:30:40 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 148 ms +2016-04-07 11:30:40 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:30:40 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:30:40 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 11:30:40 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:30:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 11:30:40 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:30:40 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 11:30:40 DEBUG TemplateModel:83 - 2016-04-07 11:30:40, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:30:40 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:30:40 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 11:30:40 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 11:30:40 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 11:30:40 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 11:30:40 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 11:30:40 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 11:30:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 11:30:40 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:30:40 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 11:30:40 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 30 ms +2016-04-07 11:30:40 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 11:30:40 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:30:40 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 11:30:40 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 11:30:40 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:30:40 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 11:30:40 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:30:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:30:40 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 11:30:40 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:30:40 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 11:30:40 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:30:40 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:30:41 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 11:30:41 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 11:30:41 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 11:30:41 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 11:30:41 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 11:30:41 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 11:30:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-07 11:30:41 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:30:41 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 11:30:41 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 101 ms +2016-04-07 11:30:41 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 11:30:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:30:41 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:30:41 INFO JCRHomeManager:48 - getUser 
portalLogin: giancarlo.panichi +2016-04-07 11:30:41 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 11:30:41 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:30:41 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 11:30:41 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:30:41 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:30:41 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 11:30:41 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:30:41 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:30:41 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 11:30:41 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 11:30:41 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:30:41 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:30:41 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:30:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:30:41 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 11:30:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 11:30:41 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:30:41 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:30:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:30:41 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory 
[id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. 
In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. 
A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator 
[id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator 
[id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. 
The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 11:30:41 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:30:41 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:30:41 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:30:41 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:30:41 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:30:41 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:30:41 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:30:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 11:30:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:30:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:30:42 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:30:42 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:30:42 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:30:42 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:30:42 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:30:42 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:30:42 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:30:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 11:30:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:30:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:30:42 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:30:42 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:30:42 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:30:42 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:30:42 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:30:42 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:30:42 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:30:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 11:30:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:30:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:30:42 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:30:42 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 11:30:42 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:30:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 11:30:42 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 11:30:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 11:30:42 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:30:42 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:30:42 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:30:42 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:30:42 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:30:42 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:30:42 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 11:30:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:30:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:30:42 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 11:30:42 INFO WorkspaceExplorerServiceImpl:188 - end time - 116 msc 0 sec +2016-04-07 11:31:53 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 11:31:53 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 11:31:53 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 11:31:53 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:31:53 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:31:53 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 11:31:53 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:31:53 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@a5c3327 +2016-04-07 11:31:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 11:31:53 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:31:53 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:31:53 DEBUG TemplateModel:83 - 2016-04-07 11:31:53, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:31:53 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:31:53 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:31:53 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 114 ms +2016-04-07 11:31:53 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 11:31:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 11:31:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 11:31:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 11:31:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 11:31:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 11:31:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 11:31:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 11:31:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 11:31:53 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 11:31:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 11:31:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 11:31:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 11:31:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 11:31:53 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 11:31:53 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:31:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:31:54 INFO 
HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@58977fec +2016-04-07 11:31:54 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@7d8f87ff +2016-04-07 11:31:54 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@20043841 +2016-04-07 11:31:54 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@7b8a1b5f +2016-04-07 11:31:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 99 ms +2016-04-07 11:31:54 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 11:31:54 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:31:54 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:31:54 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 11:31:54 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:31:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 11:31:54 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:31:54 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:31:54 DEBUG TemplateModel:83 - 2016-04-07 11:31:54, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:31:54 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:31:54 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 11:31:54 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 11:31:54 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 11:31:54 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 11:31:54 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 11:31:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 11:31:54 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:31:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 11:31:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 31 ms +2016-04-07 11:31:54 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 11:31:54 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:31:54 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 11:31:54 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 11:31:54 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:31:54 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 11:31:54 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:31:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:31:54 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 11:31:54 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:31:54 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 11:31:54 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:31:54 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:31:54 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 11:31:54 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 11:31:54 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 11:31:54 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 11:31:54 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 11:31:54 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 11:31:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-07 11:31:54 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:31:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 11:31:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 80 ms +2016-04-07 11:31:54 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 11:31:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:31:54 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:31:54 INFO JCRHomeManager:48 - getUser 
portalLogin: giancarlo.panichi +2016-04-07 11:31:54 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 11:31:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:31:54 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 11:31:55 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:31:55 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. 
Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 11:31:55 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:31:55 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:31:55 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 11:31:55 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:31:55 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:31:55 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 11:31:55 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 11:31:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:31:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:31:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:31:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:31:55 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 11:31:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 11:31:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:31:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:31:55 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:31:55 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:31:55 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:31:55 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:31:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:31:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:31:55 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:31:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 11:31:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:31:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:31:55 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:31:55 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:31:55 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:31:55 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:31:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:31:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:31:55 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:31:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 11:31:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:31:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:31:56 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:31:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:31:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:31:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:31:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:31:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:31:56 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:31:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 11:31:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:31:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:31:56 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:31:56 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 11:31:56 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:31:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 11:31:56 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 11:31:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 11:31:56 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:31:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:31:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:31:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:31:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:31:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:31:56 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 11:31:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:31:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:31:56 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 11:31:56 INFO WorkspaceExplorerServiceImpl:188 - end time - 117 msc 0 sec +2016-04-07 11:33:00 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 11:33:00 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 11:33:00 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 11:33:00 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:33:00 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:33:00 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 11:33:00 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:33:00 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@71cad1e6 +2016-04-07 11:33:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:33:00 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:33:00 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:33:00 DEBUG TemplateModel:83 - 2016-04-07 11:33:00, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:33:00 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:33:00 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:33:00 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 120 ms +2016-04-07 11:33:01 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 11:33:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 11:33:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 11:33:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 11:33:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 11:33:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 11:33:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 11:33:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 11:33:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 11:33:01 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 11:33:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 11:33:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 11:33:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 11:33:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 11:33:01 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 11:33:01 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:33:01 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:33:01 INFO 
HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@60ef71c7 +2016-04-07 11:33:01 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@3c6d1de5 +2016-04-07 11:33:01 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@28642834 +2016-04-07 11:33:01 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@25b936bf +2016-04-07 11:33:01 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 105 ms +2016-04-07 11:33:01 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 11:33:01 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:33:01 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:33:01 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 11:33:01 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:33:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:33:01 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:33:01 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:33:01 DEBUG TemplateModel:83 - 2016-04-07 11:33:01, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:33:01 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:33:01 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 11:33:01 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 11:33:01 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 11:33:01 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 11:33:01 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 11:33:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:33:01 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:33:01 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 11:33:01 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-07 11:33:01 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 11:33:01 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:33:01 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 11:33:01 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 11:33:01 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:33:01 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 11:33:01 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:33:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 11:33:01 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 11:33:01 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:33:01 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 11:33:01 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:33:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:33:01 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 11:33:01 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 11:33:01 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 11:33:01 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 11:33:01 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 11:33:01 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 11:33:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-07 11:33:01 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:33:01 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 11:33:01 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 90 ms +2016-04-07 11:33:02 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 11:33:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 11:33:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:33:02 INFO JCRHomeManager:48 - getUser 
portalLogin: giancarlo.panichi +2016-04-07 11:33:02 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 11:33:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:33:02 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 11:33:02 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:33:02 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. 
Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 11:33:02 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:33:02 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:33:02 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 11:33:02 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:33:02 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:33:02 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 11:33:02 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 11:33:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:33:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:33:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:33:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:33:02 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 11:33:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 11:33:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:33:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:33:02 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:33:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:33:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:33:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:33:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:33:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:33:02 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:33:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 11:33:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:33:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:33:02 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:33:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:33:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:33:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:33:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:33:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:33:02 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:33:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 11:33:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:33:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:33:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:33:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:33:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:33:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:33:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:33:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:33:03 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:33:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 11:33:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:33:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:33:03 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:33:03 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 11:33:03 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:33:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:33:03 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 11:33:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:33:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:33:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:33:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:33:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:33:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:33:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:33:03 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 11:33:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:33:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:33:03 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 11:33:03 INFO WorkspaceExplorerServiceImpl:188 - end time - 109 msc 0 sec +2016-04-07 11:33:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 11:33:56 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 11:35:02 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 11:35:02 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 11:35:02 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 11:35:02 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:35:02 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:35:02 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 11:35:02 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:35:02 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@7d3aca15 +2016-04-07 11:35:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 11:35:02 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:35:02 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:35:02 DEBUG TemplateModel:83 - 2016-04-07 11:35:02, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:35:02 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:35:02 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:35:02 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 117 ms +2016-04-07 11:35:03 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 11:35:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 11:35:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 11:35:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 11:35:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 11:35:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 11:35:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 11:35:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 11:35:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 11:35:03 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 11:35:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 11:35:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 11:35:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 11:35:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 11:35:03 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 11:35:03 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:35:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:35:03 INFO 
HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@c270e4c +2016-04-07 11:35:03 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@4c045e99 +2016-04-07 11:35:03 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@6edc501e +2016-04-07 11:35:03 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@8216164 +2016-04-07 11:35:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 106 ms +2016-04-07 11:35:03 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:35:03 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:35:03 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 11:35:03 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:35:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:35:03 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:35:03 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 11:35:03 DEBUG TemplateModel:83 - 2016-04-07 11:35:03, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:35:03 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:35:03 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 11:35:03 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 11:35:03 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 11:35:03 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 11:35:03 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 11:35:03 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 11:35:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 11:35:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:35:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 11:35:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 31 ms +2016-04-07 11:35:03 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 11:35:03 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:35:03 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 11:35:03 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 11:35:03 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:35:03 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 11:35:03 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:35:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 11:35:03 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 11:35:03 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:35:03 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 11:35:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:35:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:35:03 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 11:35:03 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 11:35:03 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 11:35:03 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 11:35:03 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 11:35:03 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 11:35:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-07 11:35:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:35:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 11:35:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 115 ms +2016-04-07 11:35:04 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 11:35:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 11:35:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:35:04 INFO JCRHomeManager:48 - getUser 
portalLogin: giancarlo.panichi +2016-04-07 11:35:04 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 11:35:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:35:04 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 11:35:04 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:35:04 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. 
Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 11:35:04 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:35:04 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:35:04 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 11:35:04 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:35:04 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:35:04 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 11:35:04 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 11:35:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:35:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:35:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:35:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:35:04 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 11:35:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 11:35:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:35:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:35:04 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:35:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:35:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:35:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:35:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:35:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:35:04 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:35:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 11:35:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:35:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:35:05 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:35:05 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:35:05 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:35:05 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:35:05 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:35:05 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:35:05 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:35:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 11:35:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:35:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:35:05 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:35:05 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:35:05 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:35:05 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:35:05 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:35:05 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:35:05 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:35:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 11:35:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:35:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:35:05 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:35:05 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 11:35:05 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:35:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 11:35:05 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 11:35:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 11:35:05 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:35:05 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:35:05 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:35:05 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:35:05 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:35:05 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:35:05 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 11:35:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:35:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:35:05 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 11:35:05 INFO WorkspaceExplorerServiceImpl:188 - end time - 105 msc 0 sec +2016-04-07 11:35:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 11:35:58 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 11:36:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 11:36:53 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 11:37:23 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 11:37:23 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 11:37:23 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 11:37:23 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:37:23 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:37:23 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 11:37:23 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:37:23 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@5196a0c7 +2016-04-07 11:37:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 11:37:23 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:37:23 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:37:23 DEBUG TemplateModel:83 - 2016-04-07 11:37:23, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:37:23 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:37:23 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:37:23 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 120 ms +2016-04-07 11:37:23 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 11:37:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 11:37:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 11:37:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 11:37:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 11:37:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 11:37:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 11:37:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 11:37:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 11:37:23 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 11:37:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 11:37:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 11:37:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 11:37:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 11:37:23 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 11:37:23 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:37:23 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:37:23 INFO 
HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@4d0b0a36 +2016-04-07 11:37:23 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@4515e678 +2016-04-07 11:37:23 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@752ed867 +2016-04-07 11:37:23 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@6afc6820 +2016-04-07 11:37:23 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 100 ms +2016-04-07 11:37:23 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 11:37:23 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 11:37:23 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 11:37:23 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 11:37:23 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 11:37:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 11:37:23 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:37:23 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 11:37:23 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 18 ms +2016-04-07 11:37:23 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 11:37:23 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:37:23 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 11:37:23 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 11:37:23 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:37:23 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:37:23 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 11:37:23 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:37:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 11:37:23 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:37:23 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:37:23 DEBUG TemplateModel:83 - 2016-04-07 11:37:23, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:37:23 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:37:23 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 11:37:24 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:37:24 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 11:37:24 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:37:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:37:24 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 11:37:24 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:37:24 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 11:37:24 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:37:24 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:37:24 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 11:37:24 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 11:37:24 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 11:37:24 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 11:37:24 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 11:37:24 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 11:37:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-07 11:37:24 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:37:24 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 11:37:24 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-07 11:37:24 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 11:37:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:37:24 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:37:24 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:37:24 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 11:37:24 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:37:24 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 11:37:24 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:37:24 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 11:37:24 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:37:24 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:37:24 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 11:37:24 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:37:24 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:37:24 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 11:37:24 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 11:37:24 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:37:24 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:37:24 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:37:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:37:24 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 11:37:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 11:37:25 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:37:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:37:25 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:37:25 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:37:25 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:37:25 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:37:25 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:37:25 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:37:25 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:37:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 11:37:25 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:37:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:37:25 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:37:25 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:37:25 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:37:25 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:37:25 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:37:25 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:37:25 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:37:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 11:37:25 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:37:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:37:25 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:37:25 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:37:25 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:37:25 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:37:25 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:37:25 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:37:25 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:37:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 11:37:25 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:37:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:37:25 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:37:25 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 11:37:25 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:37:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:37:25 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 11:37:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:37:25 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:37:25 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:37:25 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:37:25 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:37:25 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:37:25 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:37:25 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 11:37:25 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:37:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:37:26 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 11:37:26 INFO WorkspaceExplorerServiceImpl:188 - end time - 105 msc 0 sec +2016-04-07 11:38:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 11:38:18 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 11:39:28 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 11:39:28 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 11:39:28 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 11:39:28 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:39:28 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:39:28 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 11:39:28 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:39:28 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@144f5b9d +2016-04-07 11:39:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:39:28 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:39:28 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:39:28 DEBUG TemplateModel:83 - 2016-04-07 11:39:28, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:39:28 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:39:28 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:39:28 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 123 ms +2016-04-07 11:39:28 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 11:39:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 11:39:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 11:39:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 11:39:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 11:39:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 11:39:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 11:39:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 11:39:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 11:39:28 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 11:39:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 11:39:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 11:39:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 11:39:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 11:39:28 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 11:39:28 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:39:28 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:39:29 INFO 
HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@3388a37 +2016-04-07 11:39:29 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@715b4541 +2016-04-07 11:39:29 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@f5ea9ee +2016-04-07 11:39:29 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@60807fa0 +2016-04-07 11:39:29 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 98 ms +2016-04-07 11:39:29 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 11:39:29 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 11:39:29 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 11:39:29 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 11:39:29 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 11:39:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:39:29 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:39:29 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 11:39:29 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:39:29 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:39:29 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 11:39:29 INFO ASLSession:319 - The scope about to set 
is: /gcube/devsec/devVRE +2016-04-07 11:39:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 11:39:29 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:39:29 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:39:29 DEBUG TemplateModel:83 - 2016-04-07 11:39:29, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:39:29 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:39:29 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 11:39:29 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-07 11:39:29 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 11:39:29 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:39:29 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 11:39:29 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 11:39:29 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:39:29 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 11:39:29 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:39:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:39:29 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 11:39:29 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:39:29 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 11:39:29 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:39:29 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:39:29 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 11:39:29 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 11:39:29 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 11:39:29 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 11:39:29 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 11:39:29 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 11:39:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-07 11:39:29 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:39:29 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 11:39:29 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-07 11:39:29 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 11:39:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:39:29 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:39:29 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:39:29 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 11:39:29 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:39:29 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 11:39:30 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:39:30 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:39:30 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:39:30 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 11:39:30 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 11:39:30 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:39:30 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:39:30 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 11:39:30 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 11:39:30 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:39:30 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:39:30 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:39:30 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:39:30 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 11:39:30 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 11:39:30 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:39:30 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:39:30 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:39:30 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:39:30 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:39:30 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:39:30 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:39:30 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:39:30 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:39:30 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 11:39:30 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:39:30 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:39:30 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:39:30 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:39:30 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:39:30 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:39:30 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:39:30 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:39:30 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:39:30 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 11:39:30 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:39:30 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:39:30 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:39:30 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:39:30 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:39:30 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:39:30 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:39:30 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:39:30 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:39:30 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 11:39:30 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:39:30 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:39:31 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:39:31 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 11:39:31 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:39:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 11:39:31 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 11:39:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 11:39:31 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:39:31 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:39:31 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:39:31 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:39:31 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:39:31 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:39:31 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 11:39:31 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:39:31 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:39:31 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 11:39:31 INFO WorkspaceExplorerServiceImpl:188 - end time - 167 msc 0 sec +2016-04-07 11:40:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:40:24 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 11:41:24 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 11:41:24 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 11:41:24 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 11:41:24 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:41:24 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:41:24 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 11:41:24 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:41:24 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@15a4ba6b +2016-04-07 11:41:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 11:41:24 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:41:24 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:41:24 DEBUG TemplateModel:83 - 2016-04-07 11:41:24, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:41:24 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:41:24 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:41:24 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 114 ms +2016-04-07 11:41:24 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 11:41:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 11:41:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 11:41:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 11:41:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 11:41:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 11:41:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 11:41:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 11:41:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 11:41:24 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 11:41:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 11:41:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 11:41:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 11:41:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 11:41:24 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 11:41:24 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:41:24 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:41:24 INFO 
HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@75879700 +2016-04-07 11:41:24 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@b6e5298 +2016-04-07 11:41:24 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@45886e3d +2016-04-07 11:41:24 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@53ccbc09 +2016-04-07 11:41:24 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 111 ms +2016-04-07 11:41:24 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 11:41:24 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:41:24 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:41:24 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 11:41:24 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:41:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 11:41:24 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:41:24 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:41:24 DEBUG TemplateModel:83 - 2016-04-07 11:41:24, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:41:24 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:41:24 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 11:41:24 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 11:41:24 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 11:41:24 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 11:41:24 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 11:41:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 11:41:24 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:41:24 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 11:41:24 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 23 ms +2016-04-07 11:41:24 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 11:41:24 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:41:24 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 11:41:24 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 11:41:25 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:41:25 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 11:41:25 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:41:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 11:41:25 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 11:41:25 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:41:25 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 11:41:25 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:41:25 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:41:25 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 11:41:25 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 11:41:25 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 11:41:25 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 11:41:25 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 11:41:25 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 11:41:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 11:41:25 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:41:25 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 11:41:25 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 17 ms +2016-04-07 11:41:25 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 11:41:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 11:41:25 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:41:25 INFO JCRHomeManager:48 - getUser 
portalLogin: giancarlo.panichi +2016-04-07 11:41:25 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 11:41:25 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:41:25 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 11:41:25 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:41:25 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. 
Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 11:41:25 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:41:25 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:41:25 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 11:41:25 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:41:25 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:41:25 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 11:41:25 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 11:41:25 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:41:25 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:41:25 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:41:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:41:25 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 11:41:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 11:41:25 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:41:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:41:26 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:41:26 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:41:26 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:41:26 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:41:26 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:41:26 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:41:26 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:41:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 11:41:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:41:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:41:26 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:41:26 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:41:26 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:41:26 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:41:26 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:41:26 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:41:26 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:41:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 11:41:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:41:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:41:26 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:41:26 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:41:26 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:41:26 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:41:26 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:41:26 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:41:26 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:41:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 11:41:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:41:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:41:26 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:41:26 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 11:41:26 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:41:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 11:41:26 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 11:41:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 11:41:26 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:41:26 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:41:26 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:41:26 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:41:26 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:41:26 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:41:26 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 11:41:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:41:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:41:26 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 11:41:26 INFO WorkspaceExplorerServiceImpl:188 - end time - 114 msc 0 sec +2016-04-07 11:42:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 11:42:19 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 11:43:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:43:14 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 11:43:42 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 11:43:42 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 11:43:42 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 11:43:42 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:43:42 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:43:42 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 11:43:42 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:43:42 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@67150794 +2016-04-07 11:43:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:43:42 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:43:42 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:43:42 DEBUG TemplateModel:83 - 2016-04-07 11:43:42, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:43:42 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:43:42 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:43:42 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 126 ms +2016-04-07 11:43:42 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 11:43:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 11:43:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 11:43:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 11:43:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 11:43:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 11:43:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 11:43:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 11:43:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 11:43:42 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 11:43:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 11:43:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 11:43:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 11:43:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 11:43:42 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 11:43:42 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:43:42 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:43:42 INFO 
HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@b0db86c +2016-04-07 11:43:42 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@14d55974 +2016-04-07 11:43:42 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@675ac31b +2016-04-07 11:43:42 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@33156d0 +2016-04-07 11:43:42 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 105 ms +2016-04-07 11:43:42 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 11:43:42 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:43:42 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:43:42 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 11:43:42 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:43:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 11:43:42 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:43:42 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:43:42 DEBUG TemplateModel:83 - 2016-04-07 11:43:42, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:43:42 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:43:42 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 11:43:42 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 11:43:42 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 11:43:42 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 11:43:42 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 11:43:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:43:42 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:43:42 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 11:43:43 INFO ICClient:83 - executed query declare namespace 
ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 30 ms +2016-04-07 11:43:43 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 11:43:43 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:43:43 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 11:43:43 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 11:43:43 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:43:43 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 11:43:43 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:43:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 11:43:43 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 11:43:43 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:43:43 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 11:43:43 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:43:43 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:43:43 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 11:43:43 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 11:43:43 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 11:43:43 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 11:43:43 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 11:43:43 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 11:43:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 11:43:43 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:43:43 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 11:43:43 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 50 ms +2016-04-07 11:43:43 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 11:43:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 11:43:43 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:43:43 INFO JCRHomeManager:48 - getUser 
portalLogin: giancarlo.panichi +2016-04-07 11:43:43 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 11:43:43 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:43:43 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 11:43:43 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:43:43 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. 
Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 11:43:43 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:43:44 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:43:44 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 11:43:44 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:43:44 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:43:44 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 11:43:44 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 11:43:44 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:43:44 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:43:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:43:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:43:44 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 11:43:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 11:43:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:43:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:43:44 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:43:44 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:43:44 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:43:44 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:43:44 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:43:44 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:43:44 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:43:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 11:43:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:43:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:43:44 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:43:44 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:43:44 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:43:44 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:43:44 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:43:44 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:43:44 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:43:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 11:43:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:43:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:43:44 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:43:44 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:43:44 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:43:44 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:43:44 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:43:44 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:43:44 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:43:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 11:43:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:43:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:43:45 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:43:45 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 11:43:45 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:43:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 11:43:45 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 11:43:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 11:43:45 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:43:45 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:43:45 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:43:45 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:43:45 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:43:45 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:43:45 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 11:43:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:43:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:43:45 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 11:43:45 INFO WorkspaceExplorerServiceImpl:188 - end time - 112 msc 0 sec +2016-04-07 11:44:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 11:44:37 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 11:45:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:45:32 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 11:46:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 11:46:27 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 11:47:07 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 11:47:07 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 11:47:07 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 11:47:07 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:47:07 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:47:07 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 11:47:07 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:47:07 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@51ae2f2a +2016-04-07 11:47:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 11:47:07 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:47:07 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:47:07 DEBUG TemplateModel:83 - 2016-04-07 11:47:07, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:47:07 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:47:07 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:47:07 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 122 ms +2016-04-07 11:47:07 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 11:47:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 11:47:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 11:47:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 11:47:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 11:47:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 11:47:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 11:47:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 11:47:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 11:47:08 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 11:47:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 11:47:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 11:47:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 11:47:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 11:47:08 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 11:47:08 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:47:08 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:47:08 INFO 
HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@296d0c5 +2016-04-07 11:47:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@27ed8d28 +2016-04-07 11:47:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@199b8c37 +2016-04-07 11:47:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@6b4c37a1 +2016-04-07 11:47:08 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 116 ms +2016-04-07 11:47:08 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 11:47:08 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 11:47:08 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 11:47:08 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 11:47:08 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 11:47:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 11:47:08 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:47:08 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 11:47:08 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:47:08 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:47:08 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 11:47:08 INFO ASLSession:319 - The scope about to set 
is: /gcube/devsec/devVRE +2016-04-07 11:47:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:47:08 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:47:08 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:47:08 DEBUG TemplateModel:83 - 2016-04-07 11:47:08, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:47:08 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:47:08 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 11:47:08 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 25 ms +2016-04-07 11:47:08 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 11:47:08 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:47:08 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 11:47:08 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 11:47:08 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:47:08 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 11:47:08 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:47:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:47:08 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 11:47:08 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:47:08 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 11:47:08 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:47:08 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:47:08 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 11:47:08 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 11:47:08 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 11:47:08 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 11:47:08 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 11:47:08 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 11:47:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-07 11:47:08 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:47:08 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 11:47:08 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 21 ms +2016-04-07 11:47:09 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 11:47:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:47:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:47:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:47:09 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 11:47:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:47:09 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 11:47:09 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:47:09 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 11:47:09 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:47:09 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:47:09 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 11:47:09 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:47:09 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:47:09 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 11:47:09 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 11:47:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:47:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:47:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:47:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:47:09 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 11:47:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 11:47:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:47:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:47:09 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:47:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:47:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:47:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:47:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:47:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:47:09 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:47:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 11:47:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:47:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:47:09 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:47:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:47:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:47:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:47:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:47:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:47:09 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:47:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 11:47:10 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:47:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:47:10 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:47:10 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:47:10 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:47:10 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:47:10 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:47:10 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:47:10 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:47:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 11:47:10 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:47:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:47:10 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:47:10 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 11:47:10 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:47:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:47:10 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 11:47:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:47:10 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:47:10 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:47:10 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:47:10 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:47:10 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:47:10 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:47:10 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 11:47:10 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:47:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:47:10 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 11:47:10 INFO WorkspaceExplorerServiceImpl:188 - end time - 104 msc 0 sec +2016-04-07 11:48:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:48:03 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 11:48:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 11:48:58 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 11:51:16 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 11:51:16 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 11:51:16 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 11:51:16 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:51:16 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:51:16 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 11:51:16 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:51:16 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@697a3867 +2016-04-07 11:51:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 11:51:16 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:51:16 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:51:16 DEBUG TemplateModel:83 - 2016-04-07 11:51:16, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:51:16 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:51:16 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:51:16 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 119 ms +2016-04-07 11:51:16 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 11:51:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 11:51:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 11:51:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 11:51:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 11:51:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 11:51:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 11:51:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 11:51:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 11:51:16 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 11:51:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 11:51:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 11:51:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 11:51:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 11:51:16 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 11:51:16 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:51:16 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:51:16 INFO 
HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@6b3b151c +2016-04-07 11:51:16 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@1524b4b2 +2016-04-07 11:51:16 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@2a85945 +2016-04-07 11:51:16 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@7c0899f1 +2016-04-07 11:51:16 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 111 ms +2016-04-07 11:51:16 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 11:51:16 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 11:51:16 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 11:51:16 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 11:51:16 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 11:51:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 11:51:16 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:51:16 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 11:51:16 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 23 ms +2016-04-07 11:51:16 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 11:51:16 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:51:16 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 11:51:16 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 11:51:17 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:51:17 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:51:17 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 11:51:17 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:51:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:51:17 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:51:17 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:51:17 DEBUG TemplateModel:83 - 2016-04-07 11:51:17, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:51:17 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:51:17 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 11:51:17 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:51:17 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 11:51:17 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:51:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 37 +2016-04-07 11:51:17 DEBUG ASLSession:458 - Getting security token: null in thread 37 +2016-04-07 11:51:17 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:51:17 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 11:51:17 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:51:17 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:51:17 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 11:51:17 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 11:51:17 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 11:51:17 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 11:51:17 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 11:51:17 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 11:51:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 37 +2016-04-07 11:51:17 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:51:17 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 11:51:17 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-07 11:51:17 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:51:17 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). 
A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 11:51:17 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 11:51:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 37 +2016-04-07 11:51:17 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:51:17 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:51:17 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 11:51:17 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:51:17 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 11:51:18 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:51:18 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:51:18 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 11:51:18 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:51:18 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:51:18 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 11:51:18 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 11:51:18 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:51:18 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:51:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:51:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:51:18 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 11:51:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi 
+2016-04-07 11:51:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:51:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:51:18 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:51:18 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:51:18 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:51:18 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:51:18 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:51:18 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:51:18 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:51:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 11:51:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:51:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:51:18 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:51:18 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:51:18 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:51:18 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:51:18 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:51:18 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:51:18 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:51:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 11:51:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:51:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:51:18 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:51:18 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:51:18 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:51:18 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:51:18 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:51:18 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:51:18 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:51:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 11:51:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:51:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:51:19 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout 
property. Parsing from jar. +2016-04-07 11:51:19 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 11:51:19 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:51:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:51:19 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 11:51:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:51:19 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:51:19 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:51:19 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:51:19 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:51:19 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:51:19 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:51:19 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 11:51:19 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:51:19 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:51:19 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 11:51:19 INFO WorkspaceExplorerServiceImpl:188 - end time - 117 msc 0 sec +2016-04-07 11:51:31 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:51:31 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:51:31 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 11:51:31 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:51:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 11:51:31 
INFO ASLSession:352 - Logging the entrance +2016-04-07 11:51:31 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:51:31 DEBUG TemplateModel:83 - 2016-04-07 11:51:31, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:51:31 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:51:31 INFO DiscoveryDelegate:77 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
(cached) +2016-04-07 11:51:32 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:51:32 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:51:32 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 11:51:32 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:51:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 11:51:32 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:51:32 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:51:32 DEBUG TemplateModel:83 - 2016-04-07 11:51:32, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:51:32 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:51:32 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 11:51:32 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:51:32 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 11:51:32 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:51:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 11:51:32 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 11:51:32 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:51:32 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 11:51:32 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:51:32 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:51:32 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:51:32 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:51:32 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:51:32 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:51:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:51:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:51:32 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 11:51:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 11:51:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:51:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:51:32 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 11:51:32 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 11:51:32 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 11:51:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 11:51:32 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:51:32 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:51:32 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:51:32 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:51:32 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:51:32 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:51:32 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:51:32 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 11:51:32 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:51:32 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 25 ms +2016-04-07 11:51:32 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 11:51:32 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:51:32 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 11:51:32 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 11:51:32 INFO 
JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 11:51:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:51:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:51:32 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:51:32 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:51:32 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:51:32 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:51:32 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:51:32 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:51:32 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:51:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 11:51:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:51:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:51:32 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:51:32 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:51:32 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:51:32 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:51:32 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:51:32 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:51:32 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:51:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 11:51:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:51:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:51:33 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:51:33 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 11:51:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 11:51:33 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 11:51:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 11:51:33 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:51:33 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:51:33 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:51:33 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:51:33 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:51:33 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:51:33 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 11:51:33 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:51:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:51:33 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 11:51:33 INFO WorkspaceExplorerServiceImpl:188 - end time - 70 msc 0 sec +2016-04-07 11:53:00 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 11:53:00 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 11:53:00 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 11:53:00 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:53:00 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:53:00 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 11:53:00 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:53:00 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@615f2669 +2016-04-07 11:53:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 11:53:00 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:53:00 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:53:00 DEBUG TemplateModel:83 - 2016-04-07 11:53:00, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:53:00 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:53:00 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:53:00 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 109 ms +2016-04-07 11:53:00 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 11:53:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 11:53:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 11:53:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 11:53:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 11:53:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 11:53:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 11:53:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 11:53:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 11:53:00 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 11:53:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 11:53:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 11:53:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 11:53:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 11:53:00 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 11:53:00 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:53:00 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:53:00 INFO 
HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@17e9e2 +2016-04-07 11:53:00 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@77bb876f +2016-04-07 11:53:00 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@3613522 +2016-04-07 11:53:00 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@1b5064bf +2016-04-07 11:53:00 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 116 ms +2016-04-07 11:53:00 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 11:53:00 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 11:53:00 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 11:53:00 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 11:53:00 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 11:53:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 11:53:00 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:53:00 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 11:53:00 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 21 ms +2016-04-07 11:53:00 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 11:53:00 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:53:00 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 11:53:00 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 11:53:01 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:53:01 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:53:01 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 11:53:01 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:53:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 11:53:01 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:53:01 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:53:01 DEBUG TemplateModel:83 - 2016-04-07 11:53:01, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:53:01 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:53:01 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 11:53:01 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:53:01 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 11:53:01 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:53:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 11:53:01 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 11:53:01 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:53:01 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 11:53:01 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:53:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:53:01 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 11:53:01 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 11:53:01 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 11:53:01 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 11:53:01 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 11:53:01 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 11:53:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 11:53:01 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:53:01 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 11:53:01 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 20 ms +2016-04-07 11:53:01 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:53:01 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). 
A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 11:53:02 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 11:53:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 11:53:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:53:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:53:02 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 11:53:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:53:02 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 11:53:02 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:53:02 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:53:02 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 11:53:02 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:53:02 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:53:02 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 11:53:02 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 11:53:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:53:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:53:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:53:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:53:02 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 11:53:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi 
+2016-04-07 11:53:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:53:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:53:02 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:53:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:53:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:53:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:53:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:53:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:53:02 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:53:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 11:53:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:53:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:53:02 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:53:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:53:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:53:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:53:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:53:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:53:02 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:53:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 11:53:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:53:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:53:02 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:53:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:53:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:53:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:53:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:53:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:53:02 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:53:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 11:53:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:53:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:53:03 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout 
property. Parsing from jar. +2016-04-07 11:53:03 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 11:53:03 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:53:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 11:53:03 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 11:53:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 11:53:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:53:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:53:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:53:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:53:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:53:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:53:03 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 11:53:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:53:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:53:03 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 11:53:03 INFO WorkspaceExplorerServiceImpl:188 - end time - 113 msc 0 sec +2016-04-07 11:53:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 11:53:56 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 11:54:50 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 11:54:50 DEBUG AccessLogger:44 - Constructing a new access logger. 
Create a new file if it does not exist for the current date +2016-04-07 11:54:50 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 11:54:50 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:54:50 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:54:50 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 11:54:50 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:54:50 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@4e93717a +2016-04-07 11:54:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 11:54:50 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:54:50 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:54:50 DEBUG TemplateModel:83 - 2016-04-07 11:54:50, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:54:50 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:54:50 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 11:54:50 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 117 ms +2016-04-07 11:54:50 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 11:54:50 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 11:54:50 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 11:54:50 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 11:54:50 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 11:54:50 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 11:54:50 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 11:54:50 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 11:54:50 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 11:54:50 INFO 
ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 11:54:50 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 11:54:50 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 11:54:50 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 11:54:50 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 11:54:50 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 11:54:50 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:54:50 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-07 11:54:50 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@5dacb5f6 +2016-04-07 11:54:50 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@51483458 +2016-04-07 11:54:50 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@11249ab5 +2016-04-07 11:54:50 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@67fd4d68 +2016-04-07 11:54:50 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 111 ms +2016-04-07 11:54:51 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 11:54:51 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 11:54:51 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 11:54:51 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 11:54:51 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 11:54:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 11:54:51 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:54:51 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 11:54:51 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-07 11:54:51 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 11:54:51 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:54:51 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 11:54:51 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 11:54:51 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 11:54:51 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 11:54:51 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 11:54:51 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 11:54:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:54:51 INFO ASLSession:352 - Logging the entrance +2016-04-07 11:54:51 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 11:54:51 DEBUG TemplateModel:83 - 2016-04-07 11:54:51, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 11:54:51 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:54:51 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 11:54:51 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:54:51 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 11:54:51 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:54:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 11:54:51 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 11:54:51 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 11:54:51 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 11:54:51 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:54:51 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:54:51 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 11:54:51 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 11:54:51 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 11:54:51 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 11:54:51 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 11:54:51 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 11:54:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-07 11:54:52 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 11:54:52 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 11:54:52 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 19 ms +2016-04-07 11:54:52 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 11:54:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 11:54:52 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:54:52 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:54:52 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 11:54:52 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:54:52 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 11:54:52 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 11:54:52 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 11:54:52 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:54:52 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:54:52 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 11:54:52 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 11:54:52 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 11:54:52 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 11:54:52 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 11:54:52 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:54:52 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:54:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:54:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:54:52 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 11:54:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 11:54:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:54:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:54:52 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:54:52 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:54:52 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:54:52 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:54:52 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:54:52 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:54:52 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:54:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 11:54:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:54:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:54:52 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:54:52 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:54:52 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:54:52 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:54:52 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:54:52 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:54:53 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:54:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 11:54:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:54:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:54:54 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:54:54 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:54:54 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:54:54 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:54:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:54:54 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:54:54 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 11:54:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 11:54:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:54:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:54:54 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 11:54:54 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 11:54:54 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 11:54:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 11:54:54 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 11:54:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 11:54:54 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 11:54:54 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 11:54:54 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 11:54:54 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 11:54:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 11:54:54 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 11:54:54 INFO JCRServlets:267 - Servlet getItemById 884a1d53-ddf3-45d7-9d3d-d26445613507 +2016-04-07 11:54:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 11:54:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 11:54:55 INFO JCRServlets:142 - Calling servlet getChildrenById 884a1d53-ddf3-45d7-9d3d-d26445613507 by giancarlo.panichi +2016-04-07 11:54:55 INFO WorkspaceExplorerServiceImpl:188 - end time - 1109 msc 1 sec +2016-04-07 11:55:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:55:46 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 11:56:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:56:41 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 11:57:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 11:57:36 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 11:58:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 11:58:31 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 12:00:02 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 12:00:02 DEBUG AccessLogger:44 - Constructing a new access logger. 
Create a new file if it does not exist for the current date +2016-04-07 12:00:02 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 12:00:02 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 12:00:02 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 12:00:02 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 12:00:02 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 12:00:02 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@7deec466 +2016-04-07 12:00:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 12:00:02 INFO ASLSession:352 - Logging the entrance +2016-04-07 12:00:02 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 12:00:02 DEBUG TemplateModel:83 - 2016-04-07 12:00:02, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 12:00:02 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 12:00:02 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 12:00:02 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 112 ms +2016-04-07 12:00:02 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 12:00:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 12:00:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 12:00:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 12:00:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 12:00:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 12:00:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 12:00:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 12:00:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 12:00:02 INFO 
ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 12:00:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 12:00:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 12:00:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 12:00:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 12:00:02 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 12:00:02 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 12:00:02 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-07 12:00:02 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@65fd377 +2016-04-07 12:00:02 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@3951b861 +2016-04-07 12:00:02 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@6f2d6538 +2016-04-07 12:00:02 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@4bc56bd6 +2016-04-07 12:00:02 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 99 ms +2016-04-07 12:00:03 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 12:00:03 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 12:00:03 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 12:00:03 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 12:00:03 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 12:00:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 12:00:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 12:00:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 12:00:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 31 ms +2016-04-07 12:00:03 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 12:00:03 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 12:00:03 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 12:00:03 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 12:00:03 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 12:00:03 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 12:00:03 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 12:00:03 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 12:00:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 12:00:03 INFO ASLSession:352 - Logging the entrance +2016-04-07 12:00:03 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 12:00:03 DEBUG TemplateModel:83 - 2016-04-07 12:00:03, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 12:00:03 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 12:00:03 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 12:00:03 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 12:00:03 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 12:00:03 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 12:00:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 12:00:03 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 12:00:03 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 12:00:03 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 12:00:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 12:00:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 12:00:03 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 12:00:03 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 12:00:03 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 12:00:03 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 12:00:03 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 12:00:03 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 12:00:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-07 12:00:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 12:00:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 12:00:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 20 ms +2016-04-07 12:00:03 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 12:00:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 12:00:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 12:00:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 12:00:03 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 12:00:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 12:00:03 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 12:00:04 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 12:00:04 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 12:00:04 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 12:00:04 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 12:00:04 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 12:00:04 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 12:00:04 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 12:00:04 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 12:00:04 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 12:00:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 12:00:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 12:00:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 12:00:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 12:00:04 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-07 12:00:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 12:00:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 12:00:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 12:00:04 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 12:00:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 12:00:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 12:00:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 12:00:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 12:00:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 12:00:04 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 12:00:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 12:00:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 12:00:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 12:00:04 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 12:00:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 12:00:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 12:00:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 12:00:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 12:00:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 12:00:04 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 12:00:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 12:00:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 12:00:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 12:00:04 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 12:00:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 12:00:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 12:00:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 12:00:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 12:00:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 12:00:04 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 12:00:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 12:00:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 12:00:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 12:00:58 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 12:00:58 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 12:00:58 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 12:00:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 12:00:58 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 12:01:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 12:01:53 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 12:02:33 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 12:02:33 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 12:02:33 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 12:02:34 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 12:02:34 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 12:02:34 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 12:02:34 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 12:02:34 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@747f886a +2016-04-07 12:02:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 12:02:34 INFO ASLSession:352 - Logging the entrance +2016-04-07 12:02:34 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 12:02:34 DEBUG TemplateModel:83 - 2016-04-07 12:02:34, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 12:02:34 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 12:02:34 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 12:02:34 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 12:02:34 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 12:02:34 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 12:02:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 12:02:34 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 12:02:34 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 12:02:34 DEBUG DataMinerManagerServiceImpl:564 - getDataMinerWorkArea() +2016-04-07 12:02:34 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 12:02:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 12:02:34 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 12:02:34 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 12:02:34 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 12:02:34 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 12:02:34 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 12:02:34 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 12:02:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-07 12:02:34 INFO DefaultScanner:63 - matched 28 resources from 114 urls in 125 ms +2016-04-07 12:02:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 12:02:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 12:02:34 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 12:02:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 12:02:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 12:02:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 12:02:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 12:02:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 12:02:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 12:02:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 12:02:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 12:02:34 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 12:02:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 12:02:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 12:02:34 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://node5.d.d4science.research-infrastructures.eu:8000/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 12:02:34 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 12:02:35 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 12:02:35 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@1756945a +2016-04-07 12:02:35 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@54d46cb4 +2016-04-07 12:02:35 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@7431b238 +2016-04-07 12:02:35 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@6b58cf36 +2016-04-07 12:02:35 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 65 ms +2016-04-07 12:02:35 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 12:02:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 12:02:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 12:02:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 12:02:35 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 12:02:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 12:02:35 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 12:02:35 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 12:02:35 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 12:02:35 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 12:02:35 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 12:02:35 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 12:02:35 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 12:02:35 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 12:02:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 12:02:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 12:02:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 12:02:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 12:02:35 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 
+2016-04-07 12:02:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner by giancarlo.panichi +2016-04-07 12:02:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 12:02:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 12:02:35 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 12:02:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 12:02:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 12:02:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 12:02:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 12:02:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 12:02:35 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 12:02:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Imported Data by giancarlo.panichi +2016-04-07 12:02:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 12:02:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 12:02:36 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 12:02:36 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 12:02:36 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 12:02:36 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 12:02:36 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 12:02:36 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 12:02:36 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 12:02:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computed Data by giancarlo.panichi +2016-04-07 12:02:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 12:02:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 12:02:36 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 12:02:36 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 12:02:36 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 12:02:36 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 12:02:36 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 12:02:36 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 12:02:36 INFO JCRServlets:267 - Servlet getItemById bc72e7c4-c5ff-4304-954f-477164963090 +2016-04-07 12:02:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/DataMiner/Computations by giancarlo.panichi +2016-04-07 12:02:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 12:02:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 12:03:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 
12:03:28 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 12:04:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 12:04:23 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 12:05:18 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 12:05:18 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 12:05:18 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 12:05:18 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 12:05:18 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 12:05:18 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 12:05:18 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 12:05:18 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@3994c03d +2016-04-07 12:05:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 12:05:18 INFO ASLSession:352 - Logging the entrance +2016-04-07 12:05:18 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 12:05:18 DEBUG TemplateModel:83 - 2016-04-07 12:05:18, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 12:05:18 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 12:05:18 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 12:05:47 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. 
Parsing from jar. +2016-04-07 12:05:47 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 12:05:47 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 12:05:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 12:05:47 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 12:05:47 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 12:05:47 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 12:05:47 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 149 ms +2016-04-07 12:05:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 12:05:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 12:05:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 12:05:47 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 12:05:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 12:05:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 12:05:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 12:05:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 12:05:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 12:05:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 12:05:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 12:05:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 12:05:47 INFO 
ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 12:05:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 12:05:47 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 12:05:47 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 12:05:47 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 12:05:47 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@1158dad6 +2016-04-07 12:05:47 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@5674003e +2016-04-07 12:05:47 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@1b6baa68 +2016-04-07 12:05:47 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@3f4e159f +2016-04-07 12:05:47 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 111 ms +2016-04-07 12:05:47 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 12:05:47 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 12:05:47 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 12:05:47 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 12:05:47 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 12:05:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 12:05:47 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 12:05:47 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 12:05:48 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 23 ms +2016-04-07 12:05:48 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 12:05:48 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 12:05:48 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 12:05:48 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 12:05:48 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 12:05:48 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. 
a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. 
If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 12:06:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 12:06:13 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 12:07:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 12:07:08 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 12:08:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 12:08:03 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 12:08:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 12:08:58 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 12:09:40 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 12:09:40 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 12:09:40 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 12:09:40 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 12:09:40 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 12:09:40 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 12:09:40 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 12:09:40 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@403c6815 +2016-04-07 12:09:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 12:09:40 INFO ASLSession:352 - Logging the entrance +2016-04-07 12:09:40 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 12:09:40 DEBUG TemplateModel:83 - 2016-04-07 12:09:40, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 12:09:40 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 12:09:40 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 12:10:35 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 12:10:35 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 12:10:35 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 12:10:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 12:10:35 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 12:11:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 12:11:30 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 12:12:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 12:12:25 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 12:13:35 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 12:13:35 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 12:13:35 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 12:13:35 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 12:13:35 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 12:13:35 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 12:13:35 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 12:13:35 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@5a827a47 +2016-04-07 12:13:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 12:13:35 INFO ASLSession:352 - Logging the entrance +2016-04-07 12:13:35 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 12:13:35 DEBUG TemplateModel:83 - 2016-04-07 12:13:35, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 12:13:35 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 12:13:35 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 12:14:30 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 12:14:30 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 12:14:30 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 12:14:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 12:14:30 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 12:15:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 12:15:25 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 12:16:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 12:16:20 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 12:17:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 12:17:15 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 12:18:01 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 12:18:01 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 12:18:01 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 12:18:01 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 12:18:01 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 12:18:01 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 12:18:01 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 12:18:01 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@2feadcf9 +2016-04-07 12:18:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 12:18:01 INFO ASLSession:352 - Logging the entrance +2016-04-07 12:18:01 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 12:18:01 DEBUG TemplateModel:83 - 2016-04-07 12:18:01, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 12:18:01 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 12:18:01 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 12:18:56 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 12:18:56 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 12:18:56 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 12:18:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 12:18:56 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 12:19:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 12:19:51 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 12:20:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 12:20:46 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 12:21:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 12:21:41 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 12:22:15 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 12:22:15 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 12:22:15 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 12:22:15 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 12:22:15 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 12:22:15 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 12:22:15 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 12:22:15 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@54e059ef +2016-04-07 12:22:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 12:22:15 INFO ASLSession:352 - Logging the entrance +2016-04-07 12:22:15 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 12:22:15 DEBUG TemplateModel:83 - 2016-04-07 12:22:15, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 12:22:15 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 12:22:15 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 12:23:10 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 12:23:10 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 12:23:10 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 12:23:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 12:23:10 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 12:24:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 12:24:05 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 12:25:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 12:25:00 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 12:25:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 12:25:55 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 12:26:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 12:26:50 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 12:27:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 12:27:45 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 12:28:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 12:28:40 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 12:29:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 12:29:35 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 12:30:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 12:30:30 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 12:31:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 12:31:24 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 12:31:24 INFO SessionUtil:74 - SessionUtil: 
aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 12:31:24 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 12:31:24 INFO DefaultScanner:63 - matched 28 resources from 112 urls in 140 ms +2016-04-07 12:31:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 12:31:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 12:31:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 12:31:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 12:31:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 12:31:25 INFO 
ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 12:31:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 12:31:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 12:31:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 12:31:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 12:31:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 12:31:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 12:31:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 12:31:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 12:31:25 INFO 
StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 12:31:25 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 12:31:25 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 12:31:25 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@5129b040 +2016-04-07 12:31:25 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@527c540 +2016-04-07 12:31:25 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@332e7a5d +2016-04-07 12:31:25 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@6d4f0d00 +2016-04-07 12:31:25 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and 
(contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 110 ms +2016-04-07 12:31:25 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 12:31:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 12:31:25 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 12:31:29 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 12:31:29 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 12:31:29 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 12:31:29 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 12:31:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 12:31:29 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 12:31:29 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 12:31:29 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-07 12:31:29 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 12:31:29 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 12:31:29 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 12:31:29 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 12:33:30 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 12:33:30 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 12:33:30 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 12:33:30 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 12:33:30 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 12:33:30 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 12:33:30 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 12:33:30 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@d878ce +2016-04-07 12:33:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 12:33:30 INFO ASLSession:352 - Logging the entrance +2016-04-07 12:33:30 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 12:33:30 DEBUG TemplateModel:83 - 2016-04-07 12:33:30, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 12:33:30 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 12:33:30 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 12:34:25 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 12:34:25 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 12:34:25 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 12:34:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 12:34:25 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 12:35:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 12:35:20 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 12:36:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 12:36:15 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 12:37:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 12:37:10 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 12:38:13 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 12:38:13 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 12:38:13 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 12:38:13 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 12:38:13 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 12:38:13 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 12:38:13 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 12:38:13 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@52235de7 +2016-04-07 12:38:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 12:38:13 INFO ASLSession:352 - Logging the entrance +2016-04-07 12:38:13 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 12:38:13 DEBUG TemplateModel:83 - 2016-04-07 12:38:13, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 12:38:13 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 12:38:13 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 12:39:08 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 12:39:08 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 12:39:08 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 12:39:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 12:39:08 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 12:40:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 12:40:03 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 12:40:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 12:40:58 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 12:42:18 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 12:42:18 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 12:42:18 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 12:42:18 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 12:42:18 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 12:42:18 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 12:42:18 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 12:42:18 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@462d8cf +2016-04-07 12:42:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 12:42:18 INFO ASLSession:352 - Logging the entrance +2016-04-07 12:42:18 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 12:42:18 DEBUG TemplateModel:83 - 2016-04-07 12:42:18, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 12:42:18 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 12:42:18 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 12:43:13 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 12:43:13 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 12:43:13 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 12:43:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 12:43:13 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 12:43:40 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 12:43:40 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 12:43:40 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 12:43:40 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 12:43:40 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 12:43:40 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 12:43:40 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 12:43:40 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@49c1e3a7 +2016-04-07 12:43:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 12:43:40 INFO ASLSession:352 - Logging the entrance +2016-04-07 12:43:40 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 12:43:40 DEBUG TemplateModel:83 - 2016-04-07 12:43:40, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 12:43:40 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 12:43:40 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 12:44:35 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 12:44:35 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 12:44:35 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 12:44:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 12:44:35 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 12:45:47 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 12:45:47 DEBUG AccessLogger:44 - Constructing a new access logger. 
Create a new file if it does not exist for the current date +2016-04-07 12:45:47 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 12:45:47 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 12:45:47 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 12:45:47 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 12:45:47 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 12:45:47 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@7be3cd53 +2016-04-07 12:45:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 12:45:47 INFO ASLSession:352 - Logging the entrance +2016-04-07 12:45:47 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 12:45:47 DEBUG TemplateModel:83 - 2016-04-07 12:45:47, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 12:45:47 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 12:45:47 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 12:46:42 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 12:46:42 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 12:46:42 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 12:46:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 12:46:42 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 12:47:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 12:47:37 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 12:48:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 12:48:32 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 12:49:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 12:49:27 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 12:50:11 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 12:50:11 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 12:50:11 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 12:50:11 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 12:50:11 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 12:50:11 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 12:50:11 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 12:50:11 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@211ddae4 +2016-04-07 12:50:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 12:50:11 INFO ASLSession:352 - Logging the entrance +2016-04-07 12:50:11 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 12:50:11 DEBUG TemplateModel:83 - 2016-04-07 12:50:11, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 12:50:11 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 12:50:11 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 12:52:53 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 12:52:53 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 12:52:53 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 12:52:53 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 12:52:53 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 12:52:53 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 12:52:53 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 12:52:53 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@4f68b70f +2016-04-07 12:52:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 12:52:53 INFO ASLSession:352 - Logging the entrance +2016-04-07 12:52:53 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 12:52:53 DEBUG TemplateModel:83 - 2016-04-07 12:52:53, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 12:52:53 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 12:52:53 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 12:53:18 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 12:53:18 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 12:53:18 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 12:53:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 12:53:18 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 12:53:18 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 12:53:18 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 12:53:18 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 126 ms +2016-04-07 
12:53:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 12:53:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 12:53:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 12:53:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 12:53:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 12:53:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 12:53:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 12:53:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 12:53:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 12:53:18 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 12:53:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 12:53:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 12:53:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 12:53:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 12:53:18 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 12:53:19 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 12:53:19 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-07 12:53:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@288b4638 +2016-04-07 12:53:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@8c26def +2016-04-07 12:53:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@3a7ec87f +2016-04-07 12:53:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@156b18c9 +2016-04-07 12:53:19 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 355 ms +2016-04-07 12:53:19 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 12:53:19 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 12:53:19 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 12:53:19 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 12:53:19 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 12:53:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 12:53:19 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 12:53:20 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 12:53:20 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 19 ms +2016-04-07 12:53:20 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 12:53:20 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 12:53:20 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 12:53:20 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 12:53:20 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 12:53:20 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. 
a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. 
If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 12:53:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 12:53:48 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 12:54:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 12:54:43 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 12:55:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 12:55:38 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 12:56:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 12:56:33 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 12:57:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 12:57:28 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 12:58:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 12:58:23 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 12:59:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 12:59:18 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 13:00:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 13:00:13 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 13:01:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 13:01:08 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 13:02:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 13:02:03 DEBUG ASLSession:458 - Getting 
security token: null in thread 31 +2016-04-07 13:02:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 13:02:58 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 13:04:28 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 13:04:28 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 13:04:28 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 13:04:28 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 13:04:28 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 13:04:28 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 13:04:28 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 13:04:28 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@2765208f +2016-04-07 13:04:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 13:04:28 INFO ASLSession:352 - Logging the entrance +2016-04-07 13:04:28 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 13:04:28 DEBUG TemplateModel:83 - 2016-04-07 13:04:28, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 13:04:28 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 13:04:28 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 13:04:33 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. 
+2016-04-07 13:04:33 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 13:04:33 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 13:04:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 13:04:33 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 13:04:33 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 13:04:33 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 13:04:33 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 125 ms +2016-04-07 13:04:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 13:04:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 13:04:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 13:04:33 INFO 
ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 13:04:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 13:04:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 13:04:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 13:04:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 13:04:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 13:04:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 13:04:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 13:04:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 13:04:33 INFO 
ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 13:04:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 13:04:33 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 13:04:33 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 13:04:33 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 13:04:33 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@3ec06e9e +2016-04-07 13:04:33 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@6b7b0d06 +2016-04-07 13:04:33 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@4685222f +2016-04-07 13:04:33 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@21873728 +2016-04-07 13:04:33 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 107 ms +2016-04-07 13:04:33 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 13:04:33 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 13:04:33 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 13:04:33 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 13:04:33 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 13:04:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 13:04:33 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 13:04:33 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 13:04:33 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 19 ms +2016-04-07 13:04:33 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 13:04:33 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 13:04:33 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 13:04:33 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 13:04:34 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 13:04:34 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. 
a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. 
If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 13:05:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 13:05:23 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 13:06:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 13:06:07 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 13:06:07 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 13:06:07 INFO SClient4WPS:643 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS +2016-04-07 13:06:07 DEBUG SClient4WPS:276 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 13:06:07 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 13:06:07 DEBUG SClient4WPS:297 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS + XMEANS + A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + + + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + + + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 13:06:07 DEBUG SClient4WPS:301 - WPSClient->Fetching Inputs +2016-04-07 13:06:07 DEBUG SClient4WPS:303 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 13:06:07 DEBUG SClient4WPS:303 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 13:06:07 DEBUG SClient4WPS:303 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 13:06:07 DEBUG SClient4WPS:303 - WPSClient->Input: + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. 
XMeans max number of overall iterations of the clustering learning + + + + 10 + + +2016-04-07 13:06:07 DEBUG SClient4WPS:303 - WPSClient->Input: + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + +2016-04-07 13:06:07 DEBUG SClient4WPS:303 - WPSClient->Input: + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + +2016-04-07 13:06:07 DEBUG SClient4WPS:303 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-07 13:06:07 DEBUG SClient4WPS:308 - WPSClient->Fetching Outputs +2016-04-07 13:06:07 DEBUG SClient4WPS:310 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 13:06:07 DEBUG SClient4WPS:310 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 13:06:07 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 13:06:07 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 13:06:07 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 13:06:07 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 13:06:07 DEBUG WPS2SM:201 - Schema: null +2016-04-07 13:06:07 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 13:06:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 13:06:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 13:06:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 13:06:07 DEBUG SClient4WPS:658 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-07 13:06:07 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 13:06:07 DEBUG WPS2SM:93 - WPS type: +2016-04-07 13:06:07 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 13:06:07 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 13:06:07 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-07 13:06:07 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 13:06:07 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 13:06:07 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 13:06:07 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 13:06:07 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 13:06:07 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 13:06:07 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 13:06:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 13:06:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 13:06:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 13:06:07 DEBUG SClient4WPS:658 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 13:06:07 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 13:06:07 DEBUG WPS2SM:93 - WPS type: +2016-04-07 13:06:07 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 13:06:07 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 13:06:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 13:06:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 13:06:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 13:06:07 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 13:06:07 DEBUG WPS2SM:254 - Conversion to SM Type->maxIterations is a Literal Input +2016-04-07 13:06:07 DEBUG WPS2SM:93 - WPS type: +2016-04-07 13:06:07 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 13:06:07 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 13:06:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:XMeans max number of overall iterations of the clustering learning +2016-04-07 13:06:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxIterations +2016-04-07 13:06:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 13:06:07 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-07 13:06:07 DEBUG WPS2SM:254 - Conversion to SM Type->minClusters is a Literal Input +2016-04-07 13:06:07 DEBUG WPS2SM:93 - WPS type: +2016-04-07 13:06:07 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 13:06:07 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 13:06:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:minimum number of expected clusters +2016-04-07 13:06:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minClusters +2016-04-07 13:06:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 13:06:07 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 13:06:07 DEBUG WPS2SM:254 - Conversion to SM Type->maxClusters is a Literal Input +2016-04-07 13:06:07 DEBUG WPS2SM:93 - WPS type: +2016-04-07 13:06:07 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 13:06:07 DEBUG WPS2SM:101 - Guessed default value: 50 +2016-04-07 13:06:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of clusters to produce +2016-04-07 13:06:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxClusters +2016-04-07 13:06:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 13:06:07 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. 
of Entries:1; default:50], typology=OBJECT] +2016-04-07 13:06:07 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 13:06:07 DEBUG WPS2SM:93 - WPS type: +2016-04-07 13:06:07 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 13:06:07 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-07 13:06:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-07 13:06:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 13:06:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 13:06:07 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-07 13:06:07 DEBUG SClient4WPS:662 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. of Entries:1; default:50], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-07 13:06:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 13:06:08 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 13:06:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 13:06:08 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 13:06:08 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 13:06:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 13:06:08 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 13:06:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 13:06:08 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 13:06:08 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 13:06:08 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 13:06:08 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 13:06:08 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 13:06:08 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 13:06:08 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 13:06:08 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 13:06:08 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 13:06:08 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 13:06:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-07 13:06:08 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 13:06:08 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 13:06:08 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 37 ms +2016-04-07 13:06:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 13:06:08 DEBUG ASLSession:458 - Getting security token: null in thread 32 
+2016-04-07 13:06:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 13:06:08 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 13:06:08 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 13:06:08 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 13:06:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 13:06:08 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 13:06:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 13:06:08 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 13:06:08 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 13:06:08 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 13:06:08 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 13:06:08 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 13:06:08 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 13:06:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 13:06:08 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 13:06:08 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 13:06:08 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 13:06:08 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 13:06:08 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 13:06:08 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 13:06:08 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 13:06:08 DEBUG 
JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 13:06:08 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 13:06:08 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 13:06:08 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 13:06:08 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 13:06:08 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 13:06:08 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 13:06:09 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 13:06:09 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 13:06:09 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 13:06:09 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 13:06:09 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 13:06:09 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 13:06:09 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 13:06:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 13:06:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 13:06:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 13:06:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 13:06:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 13:06:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 13:06:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 13:06:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 13:06:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 13:06:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 13:06:09 INFO JCRWorkspace:315 - Getting Workspace of user: 
giancarlo.panichi +2016-04-07 13:06:09 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 13:06:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 13:06:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 13:06:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 13:06:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 13:06:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 13:06:09 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 13:06:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 13:06:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 13:06:09 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 13:06:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 13:06:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 13:06:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 13:06:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 13:06:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 13:06:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 13:06:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 13:06:09 INFO JCRServlets:142 - Calling servlet 
getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 13:06:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 13:06:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 13:06:09 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 13:06:09 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 13:06:09 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 35 ms +2016-04-07 13:06:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 13:06:09 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 13:06:09 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-07 13:06:09 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 
'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-07 13:06:10 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-07 13:06:10 INFO ISClientConnector:82 - found only one RR, take it +2016-04-07 13:06:10 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-07 13:06:10 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-07 13:06:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 13:06:10 DEBUG StorageClient:517 - set scope: /gcube +2016-04-07 13:06:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 13:06:10 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 13:06:10 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 13:06:10 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-07 13:06:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 13:06:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 13:06:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 13:06:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 13:06:10 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 13:06:10 DEBUG ItemBuilder:108 - Is 
shared folder: Cotrix test +2016-04-07 13:06:10 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 13:06:10 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 13:06:10 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 13:06:10 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 13:06:10 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 13:06:10 INFO WorkspaceExplorerServiceImpl:142 - end time - 532 msc 0 sec +2016-04-07 13:06:10 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 13:06:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 13:06:18 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 13:07:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 13:07:13 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 13:08:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 13:08:08 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 13:09:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 13:09:03 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 13:09:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 13:09:58 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 13:10:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 13:10:53 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 13:12:18 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 13:12:18 DEBUG AccessLogger:44 - Constructing a new access logger. 
Create a new file if it does not exist for the current date +2016-04-07 13:12:18 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 13:12:18 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 13:12:18 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 13:12:18 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 13:12:18 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 13:12:18 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@575467bb +2016-04-07 13:12:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 13:12:18 INFO ASLSession:352 - Logging the entrance +2016-04-07 13:12:18 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 13:12:18 DEBUG TemplateModel:83 - 2016-04-07 13:12:18, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 13:12:18 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 13:12:18 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 13:12:22 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 13:12:22 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 13:12:22 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 13:12:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 13:12:22 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 13:12:22 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 13:12:22 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 13:12:22 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 112 ms +2016-04-07 13:12:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 13:12:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 13:12:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 13:12:23 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 13:12:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 13:12:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 13:12:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 13:12:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 13:12:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 13:12:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 13:12:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 13:12:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 13:12:23 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 13:12:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 13:12:23 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 13:12:23 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 13:12:23 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 13:12:23 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@224d2bd4 +2016-04-07 13:12:23 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@45c5318a +2016-04-07 13:12:23 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@34b4c990 +2016-04-07 13:12:23 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@12a2b4d4 +2016-04-07 13:12:23 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 109 ms +2016-04-07 13:12:23 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 13:12:23 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 13:12:23 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 13:12:23 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 13:12:23 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 13:12:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 13:12:23 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 13:12:23 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 13:12:23 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 26 ms +2016-04-07 13:12:23 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 13:12:23 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 13:12:23 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 13:12:23 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 13:12:24 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 13:12:24 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. 
a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. 
If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 13:13:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 13:13:13 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 13:14:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 13:14:08 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 13:15:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 13:15:03 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 13:15:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 13:15:58 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 13:16:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 13:16:53 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 13:17:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 13:17:48 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 13:18:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 13:18:43 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 13:19:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 13:19:38 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 13:20:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 13:20:33 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 13:21:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 13:21:28 DEBUG ASLSession:458 - Getting 
security token: null in thread 34 +2016-04-07 13:22:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 13:22:23 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 13:23:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 13:23:18 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 13:24:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 13:24:13 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 13:25:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 13:25:08 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 13:26:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 13:26:03 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 13:26:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 13:26:58 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 13:27:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 13:27:53 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 13:28:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 13:28:48 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 13:29:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 13:29:43 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 13:30:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 13:30:38 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 13:31:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in 
thread 30 +2016-04-07 13:31:33 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 13:32:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 13:32:28 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 13:33:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 13:33:23 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 13:34:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 13:34:18 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 13:35:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 13:35:13 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 13:36:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 13:36:08 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 13:37:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 13:37:03 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 13:37:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 13:37:58 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 13:38:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 13:38:53 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 13:39:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 13:39:48 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 13:40:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 13:40:43 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 13:41:38 DEBUG 
DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 13:41:38 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 13:42:33 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 13:42:33 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 13:42:33 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 13:42:33 WARN SessionCheckerServiceImpl:80 - Scope is null at Thu Apr 07 13:42:33 CEST 2016 +2016-04-07 13:42:33 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 13:47:18 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 13:47:18 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 13:47:18 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 14:07:10 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 14:07:10 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 14:07:10 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 14:07:10 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 14:07:10 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 14:07:10 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 14:07:10 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 14:07:10 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@7aea9d4e +2016-04-07 14:07:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 14:07:10 INFO ASLSession:352 - Logging the entrance +2016-04-07 14:07:10 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 14:07:10 DEBUG TemplateModel:83 - 2016-04-07 14:07:10, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 14:07:10 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 14:07:10 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 14:07:13 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 14:07:13 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 14:07:13 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 14:07:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 14:07:13 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 14:07:13 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 14:07:13 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 14:07:13 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 148 ms +2016-04-07 
14:07:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 14:07:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 14:07:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 14:07:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 14:07:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 14:07:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 14:07:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 14:07:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 14:07:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 14:07:13 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 14:07:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 14:07:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 14:07:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 14:07:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 14:07:13 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 14:07:14 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 14:07:14 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-07 14:07:14 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@4ba1b3f1 +2016-04-07 14:07:14 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@1f912fef +2016-04-07 14:07:14 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@72ac3f88 +2016-04-07 14:07:14 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@27a3bcbd +2016-04-07 14:07:14 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 105 ms +2016-04-07 14:07:14 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 14:07:14 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 14:07:14 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 14:07:14 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 14:07:14 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 14:07:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 14:07:14 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 14:07:14 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 14:07:14 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 26 ms +2016-04-07 14:07:14 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 14:07:14 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 14:07:14 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 14:07:14 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 14:07:15 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 14:07:15 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. 
a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. 
If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 14:08:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 14:08:05 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 14:09:21 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 14:09:21 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 14:09:21 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 14:09:21 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 14:09:21 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 14:09:21 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 14:09:21 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 14:09:21 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@1cecca67 +2016-04-07 14:09:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 14:09:21 INFO ASLSession:352 - Logging the entrance +2016-04-07 14:09:21 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 14:09:21 DEBUG TemplateModel:83 - 2016-04-07 14:09:21, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 14:09:21 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 14:09:21 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 14:09:27 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 14:09:27 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 14:09:27 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 14:09:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 14:09:27 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 14:09:27 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 14:09:27 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 14:09:28 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 147 ms +2016-04-07 
14:09:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 14:09:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 14:09:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 14:09:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 14:09:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 14:09:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 14:09:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 14:09:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 14:09:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 14:09:28 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 14:09:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 14:09:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 14:09:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 14:09:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 14:09:28 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 14:09:28 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 14:09:28 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-07 14:09:28 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@7082b0fb +2016-04-07 14:09:28 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@5908d343 +2016-04-07 14:09:28 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@3a18e1dc +2016-04-07 14:09:28 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@664e4a04 +2016-04-07 14:09:28 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 117 ms +2016-04-07 14:09:28 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 14:09:28 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 14:09:28 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 14:09:28 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 14:09:28 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 14:09:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 14:09:28 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 14:09:28 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 14:09:28 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-07 14:09:28 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 14:09:28 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 14:09:28 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 14:09:28 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 14:09:29 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 14:09:29 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. 
a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. 
If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 14:10:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 14:10:16 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 14:11:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 14:11:11 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 14:12:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 14:12:06 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 14:13:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 14:13:01 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 14:13:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 14:13:56 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 14:15:08 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 14:15:08 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 14:15:08 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 14:15:08 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 14:15:08 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 14:15:08 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 14:15:08 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 14:15:08 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@7bc347e8 +2016-04-07 14:15:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 14:15:08 INFO ASLSession:352 - Logging the entrance +2016-04-07 14:15:08 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 14:15:08 DEBUG TemplateModel:83 - 2016-04-07 14:15:08, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 14:15:08 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 14:15:08 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 14:15:15 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 14:15:15 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 14:15:15 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 14:15:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 14:15:15 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 14:15:15 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 14:15:15 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 14:15:15 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 164 ms +2016-04-07 14:15:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 14:15:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 14:15:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 14:15:15 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 14:15:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 14:15:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 14:15:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 14:15:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 14:15:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 14:15:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 14:15:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 14:15:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 14:15:15 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 14:15:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 14:15:15 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 14:15:15 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 14:15:15 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 14:15:15 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@10fdcaa5 +2016-04-07 14:15:15 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@5a824f24 +2016-04-07 14:15:15 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@6443c0a8 +2016-04-07 14:15:15 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@6757fd2b +2016-04-07 14:15:15 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 103 ms +2016-04-07 14:15:15 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 14:15:15 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 14:15:15 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 14:15:15 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 14:15:15 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 14:15:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 14:15:15 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 14:15:15 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 14:15:15 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 30 ms +2016-04-07 14:15:15 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 14:15:15 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 14:15:15 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 14:15:15 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 14:15:16 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 14:15:16 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. 
a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. 
If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 14:27:22 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 14:27:22 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 14:27:22 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 14:27:23 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 14:27:23 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 14:27:23 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 14:27:23 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 14:27:23 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@653f1e5c +2016-04-07 14:27:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 14:27:23 INFO ASLSession:352 - Logging the entrance +2016-04-07 14:27:23 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 14:27:23 DEBUG TemplateModel:83 - 2016-04-07 14:27:23, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 14:27:23 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 14:27:23 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 14:27:28 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 14:27:28 DEBUG ASLSession:141 - Could not parse file properties.xml for property. 
Setting it to default. +2016-04-07 14:27:28 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 14:27:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 14:27:28 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 14:27:28 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 14:27:28 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 14:27:28 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 121 ms +2016-04-07 14:27:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 14:27:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 14:27:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 14:27:28 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 14:27:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 14:27:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 14:27:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 14:27:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 14:27:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 14:27:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 14:27:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 14:27:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 14:27:28 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 14:27:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 14:27:28 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 14:27:28 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 14:27:29 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 14:27:29 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@526883a2 +2016-04-07 14:27:29 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@98c0cef +2016-04-07 14:27:29 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@6ddc5ef6 +2016-04-07 14:27:29 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@1295ad39 +2016-04-07 14:27:29 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 285 ms +2016-04-07 14:27:29 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 14:27:29 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 14:27:29 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 14:27:29 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 14:27:29 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 14:27:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 14:27:29 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 14:27:29 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 14:27:29 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 19 ms +2016-04-07 14:27:29 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 14:27:29 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 14:27:29 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 14:27:29 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 14:27:30 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 14:27:30 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. 
a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. 
If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 14:28:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 14:28:17 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 14:29:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 14:29:12 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 14:30:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 14:30:07 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 14:31:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 14:31:02 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 14:31:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 14:31:57 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 14:32:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 14:32:52 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 14:35:01 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 14:35:01 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 14:35:01 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 14:35:01 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 14:35:01 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 14:35:01 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 14:35:01 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 14:35:01 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@7addd08d +2016-04-07 14:35:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 14:35:01 INFO ASLSession:352 - Logging the entrance +2016-04-07 14:35:01 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 14:35:01 DEBUG TemplateModel:83 - 2016-04-07 14:35:01, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 14:35:01 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 14:35:01 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 14:35:05 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 14:35:05 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 14:35:05 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 14:35:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 14:35:05 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 14:35:05 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 14:35:05 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 14:35:05 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 147 ms +2016-04-07 14:35:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 14:35:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 14:35:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 14:35:05 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 14:35:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 14:35:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 14:35:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 14:35:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 14:35:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 14:35:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 14:35:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 14:35:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 14:35:05 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 14:35:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 14:35:05 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 14:35:06 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 14:35:06 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 14:35:06 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@118b96f0 +2016-04-07 14:35:06 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@24eca618 +2016-04-07 14:35:06 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@704ea244 +2016-04-07 14:35:06 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@7f352791 +2016-04-07 14:35:06 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 108 ms +2016-04-07 14:35:06 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 14:35:06 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 14:35:06 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 14:35:06 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 14:35:06 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 14:35:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 14:35:06 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 14:35:06 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 14:35:06 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 30 ms +2016-04-07 14:35:06 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 14:35:06 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 14:35:06 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 14:35:06 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 14:35:07 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 14:35:07 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. 
a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. 
If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 14:35:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 14:35:56 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 14:36:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 28 +2016-04-07 14:36:51 DEBUG ASLSession:458 - Getting security token: null in thread 28 +2016-04-07 14:37:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 28 +2016-04-07 14:37:46 DEBUG ASLSession:458 - Getting security token: null in thread 28 +2016-04-07 14:38:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 14:38:41 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 14:39:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 14:39:36 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 14:40:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 14:40:31 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 14:41:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 14:41:26 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 14:42:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 14:42:21 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 14:43:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 28 +2016-04-07 14:43:16 DEBUG ASLSession:458 - Getting security token: null in thread 28 +2016-04-07 14:43:54 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 14:43:54 DEBUG 
AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 14:43:54 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 14:43:54 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 14:43:54 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 14:43:54 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 14:43:54 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 14:43:54 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@7b2e1dd0 +2016-04-07 14:43:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 14:43:54 INFO ASLSession:352 - Logging the entrance +2016-04-07 14:43:54 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 14:43:54 DEBUG TemplateModel:83 - 2016-04-07 14:43:54, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 14:43:54 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 14:43:54 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 14:43:59 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 14:43:59 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 14:43:59 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 14:43:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 14:43:59 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 14:43:59 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 14:43:59 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 14:43:59 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 159 ms +2016-04-07 14:43:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 14:43:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 14:43:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 14:43:59 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 14:43:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 14:43:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 14:43:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 14:43:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 14:43:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 14:43:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 14:43:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 14:43:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 14:43:59 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 14:43:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 14:43:59 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 14:43:59 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 14:43:59 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 14:43:59 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@7ddfce1d +2016-04-07 14:43:59 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@56c4cf53 +2016-04-07 14:43:59 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@63b5cfca +2016-04-07 14:43:59 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@4a4c388d +2016-04-07 14:43:59 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 127 ms +2016-04-07 14:44:00 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 14:44:00 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 14:44:00 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 14:44:00 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 14:44:00 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 14:44:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 14:44:00 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 14:44:00 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 14:44:00 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 21 ms +2016-04-07 14:44:00 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 14:44:00 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 14:44:00 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 14:44:00 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 14:44:01 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 14:44:01 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. 
a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. 
If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 14:44:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 14:44:49 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 14:45:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 14:45:44 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 14:45:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 14:45:49 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 14:45:49 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 14:45:49 INFO SClient4WPS:643 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING +2016-04-07 14:45:49 DEBUG SClient4WPS:276 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 14:45:49 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 14:45:49 DEBUG SClient4WPS:297 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING + MAX_ENT_NICHE_MODELLING + A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt + + + OutputTableLabel + The name of the table to produce + Name of the parameter: OutputTableLabel. The name of the table to produce + + + + maxent_ + + + + SpeciesName + The name of the species to model and the occurrence records refer to + Name of the parameter: SpeciesName. The name of the species to model and the occurrence records refer to + + + + generic_species + + + + MaxIterations + The number of learning iterations of the MaxEnt algorithm + Name of the parameter: MaxIterations. The number of learning iterations of the MaxEnt algorithm + + + + 1000 + + + + DefaultPrevalence + A priori probability of presence at ordinary occurrence points + Name of the parameter: DefaultPrevalence. A priori probability of presence at ordinary occurrence points + + + + 0.5 + + + + OccurrencesTable + A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + Name of the parameter: OccurrencesTable. 
A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + LongitudeColumn + The column containing longitude values [the name of a column from OccurrencesTable] + Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrencesTable] + + + + decimallongitude + + + + LatitudeColumn + The column containing latitude values [the name of a column from OccurrencesTable] + Name of the parameter: LatitudeColumn. The column containing latitude values [the name of a column from OccurrencesTable] + + + + decimallatitude + + + + XResolution + Model projection resolution on the X axis in decimal degrees + Name of the parameter: XResolution. Model projection resolution on the X axis in decimal degrees + + + + 1 + + + + YResolution + Model projection resolution on the Y axis in decimal degrees + Name of the parameter: YResolution. Model projection resolution on the Y axis in decimal degrees + + + + 1 + + + + Layers + The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. 
the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + + + + + + + Z + Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + Name of the parameter: Z. Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + + + + 0 + + + + TimeIndex + Time Index. The default is the first time indexed in the input environmental datasets + Name of the parameter: TimeIndex. Time Index. The default is the first time indexed in the input environmental datasets + + + + 0 + + + + + + Best Threshold + Best threshold for transforming MaxEnt values into 0/1 probability assignments + Name of the parameter: Best Threshold. Best threshold for transforming MaxEnt values into 0/1 probability assignments + + + + + + Estimated Prevalence + The a posteriori estimated prevalence of the species + Name of the parameter: Estimated Prevalence. The a posteriori estimated prevalence of the species + + + + + + Variables contributions + The contribution of each variable to the MaxEnt values estimates + Name of the parameter: Variables contributions. The contribution of each variable to the MaxEnt values estimates + + + + + + Variables Permutations Importance + The importance of the permutations of the variables during the training + Name of the parameter: Variables Permutations Importance. 
The importance of the permutations of the variables during the training + + + + + + ASCII Maps of the environmental layers for checking features aligments + ASCII Maps of the environmental layers for checking features aligments + Name of the parameter: ASCII Maps of the environmental layers for checking features aligments. ASCII Maps of the environmental layers for checking features aligments + + + + application/d4science + + + + + application/d4science + + + + + + OutputTable7 + Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OutputTable7. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 14:45:49 DEBUG SClient4WPS:301 - WPSClient->Fetching Inputs +2016-04-07 14:45:49 DEBUG SClient4WPS:303 - WPSClient->Input: + OutputTableLabel + The name of the table to produce + Name of the parameter: OutputTableLabel. The name of the table to produce + + + + maxent_ + + +2016-04-07 14:45:49 DEBUG SClient4WPS:303 - WPSClient->Input: + SpeciesName + The name of the species to model and the occurrence records refer to + Name of the parameter: SpeciesName. The name of the species to model and the occurrence records refer to + + + + generic_species + + +2016-04-07 14:45:49 DEBUG SClient4WPS:303 - WPSClient->Input: + MaxIterations + The number of learning iterations of the MaxEnt algorithm + Name of the parameter: MaxIterations. 
The number of learning iterations of the MaxEnt algorithm + + + + 1000 + + +2016-04-07 14:45:49 DEBUG SClient4WPS:303 - WPSClient->Input: + DefaultPrevalence + A priori probability of presence at ordinary occurrence points + Name of the parameter: DefaultPrevalence. A priori probability of presence at ordinary occurrence points + + + + 0.5 + + +2016-04-07 14:45:49 DEBUG SClient4WPS:303 - WPSClient->Input: + OccurrencesTable + A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + Name of the parameter: OccurrencesTable. A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 14:45:49 DEBUG SClient4WPS:303 - WPSClient->Input: + LongitudeColumn + The column containing longitude values [the name of a column from OccurrencesTable] + Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrencesTable] + + + + decimallongitude + + +2016-04-07 14:45:49 DEBUG SClient4WPS:303 - WPSClient->Input: + LatitudeColumn + The column containing latitude values [the name of a column from OccurrencesTable] + Name of the parameter: LatitudeColumn. The column containing latitude values [the name of a column from OccurrencesTable] + + + + decimallatitude + + +2016-04-07 14:45:49 DEBUG SClient4WPS:303 - WPSClient->Input: + XResolution + Model projection resolution on the X axis in decimal degrees + Name of the parameter: XResolution. 
Model projection resolution on the X axis in decimal degrees + + + + 1 + + +2016-04-07 14:45:49 DEBUG SClient4WPS:303 - WPSClient->Input: + YResolution + Model projection resolution on the Y axis in decimal degrees + Name of the parameter: YResolution. Model projection resolution on the Y axis in decimal degrees + + + + 1 + + +2016-04-07 14:45:49 DEBUG SClient4WPS:303 - WPSClient->Input: + Layers + The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + + + + + +2016-04-07 14:45:49 DEBUG SClient4WPS:303 - WPSClient->Input: + Z + Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + Name of the parameter: Z. Value of Z. 
Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + + + + 0 + + +2016-04-07 14:45:49 DEBUG SClient4WPS:303 - WPSClient->Input: + TimeIndex + Time Index. The default is the first time indexed in the input environmental datasets + Name of the parameter: TimeIndex. Time Index. The default is the first time indexed in the input environmental datasets + + + + 0 + + +2016-04-07 14:45:49 DEBUG SClient4WPS:308 - WPSClient->Fetching Outputs +2016-04-07 14:45:49 DEBUG SClient4WPS:310 - WPSClient->Output: + Best Threshold + Best threshold for transforming MaxEnt values into 0/1 probability assignments + Name of the parameter: Best Threshold. Best threshold for transforming MaxEnt values into 0/1 probability assignments + + + + +2016-04-07 14:45:49 DEBUG SClient4WPS:310 - WPSClient->Output: + Estimated Prevalence + The a posteriori estimated prevalence of the species + Name of the parameter: Estimated Prevalence. The a posteriori estimated prevalence of the species + + + + +2016-04-07 14:45:49 DEBUG SClient4WPS:310 - WPSClient->Output: + Variables contributions + The contribution of each variable to the MaxEnt values estimates + Name of the parameter: Variables contributions. The contribution of each variable to the MaxEnt values estimates + + + + +2016-04-07 14:45:49 DEBUG SClient4WPS:310 - WPSClient->Output: + Variables Permutations Importance + The importance of the permutations of the variables during the training + Name of the parameter: Variables Permutations Importance. The importance of the permutations of the variables during the training + + + + +2016-04-07 14:45:49 DEBUG SClient4WPS:310 - WPSClient->Output: + ASCII Maps of the environmental layers for checking features aligments + ASCII Maps of the environmental layers for checking features aligments + Name of the parameter: ASCII Maps of the environmental layers for checking features aligments. 
ASCII Maps of the environmental layers for checking features aligments + + + + application/d4science + + + + + application/d4science + + + + +2016-04-07 14:45:49 DEBUG SClient4WPS:310 - WPSClient->Output: + OutputTable7 + Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OutputTable7. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 14:45:49 DEBUG SClient4WPS:310 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 14:45:49 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 14:45:49 DEBUG WPS2SM:254 - Conversion to SM Type->OutputTableLabel is a Literal Input +2016-04-07 14:45:49 DEBUG WPS2SM:93 - WPS type: +2016-04-07 14:45:49 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 14:45:49 DEBUG WPS2SM:101 - Guessed default value: maxent_ +2016-04-07 14:45:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The name of the table to produce +2016-04-07 14:45:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OutputTableLabel +2016-04-07 14:45:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 14:45:49 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=maxent_, value=null, name=OutputTableLabel, description=The name of the table to produce [Min N. of Entries:1; Max N. 
of Entries:1; default:maxent_], typology=OBJECT] +2016-04-07 14:45:49 DEBUG WPS2SM:254 - Conversion to SM Type->SpeciesName is a Literal Input +2016-04-07 14:45:49 DEBUG WPS2SM:93 - WPS type: +2016-04-07 14:45:49 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 14:45:49 DEBUG WPS2SM:101 - Guessed default value: generic_species +2016-04-07 14:45:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The name of the species to model and the occurrence records refer to +2016-04-07 14:45:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:SpeciesName +2016-04-07 14:45:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 14:45:49 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=generic_species, value=null, name=SpeciesName, description=The name of the species to model and the occurrence records refer to [Min N. of Entries:1; Max N. of Entries:1; default:generic_species], typology=OBJECT] +2016-04-07 14:45:49 DEBUG WPS2SM:254 - Conversion to SM Type->MaxIterations is a Literal Input +2016-04-07 14:45:49 DEBUG WPS2SM:93 - WPS type: +2016-04-07 14:45:49 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 14:45:49 DEBUG WPS2SM:101 - Guessed default value: 1000 +2016-04-07 14:45:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The number of learning iterations of the MaxEnt algorithm +2016-04-07 14:45:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:MaxIterations +2016-04-07 14:45:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 14:45:49 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1000, value=null, name=MaxIterations, description=The number of learning iterations of the MaxEnt algorithm [Min N. of Entries:1; Max N. 
of Entries:1; default:1000], typology=OBJECT] +2016-04-07 14:45:49 DEBUG WPS2SM:254 - Conversion to SM Type->DefaultPrevalence is a Literal Input +2016-04-07 14:45:49 DEBUG WPS2SM:93 - WPS type: +2016-04-07 14:45:49 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-07 14:45:49 DEBUG WPS2SM:101 - Guessed default value: 0.5 +2016-04-07 14:45:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:A priori probability of presence at ordinary occurrence points +2016-04-07 14:45:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:DefaultPrevalence +2016-04-07 14:45:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 14:45:49 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=0.5, value=null, name=DefaultPrevalence, description=A priori probability of presence at ordinary occurrence points [Min N. of Entries:1; Max N. of Entries:1; default:0.5], typology=OBJECT] +2016-04-07 14:45:49 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencesTable is a Complex Input +2016-04-07 14:45:49 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 14:45:49 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 14:45:49 DEBUG WPS2SM:201 - Schema: null +2016-04-07 14:45:49 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 14:45:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] +2016-04-07 14:45:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencesTable +2016-04-07 14:45:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 14:45:49 DEBUG SClient4WPS:658 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencesTable, description=A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets 
[a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 14:45:49 DEBUG WPS2SM:254 - Conversion to SM Type->LongitudeColumn is a Literal Input +2016-04-07 14:45:49 DEBUG WPS2SM:93 - WPS type: +2016-04-07 14:45:49 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 14:45:49 DEBUG WPS2SM:101 - Guessed default value: decimallongitude +2016-04-07 14:45:49 DEBUG WPS2SM:130 - Machter title: The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallongitude] +2016-04-07 14:45:49 DEBUG WPS2SM:131 - Machter find: true +2016-04-07 14:45:49 DEBUG WPS2SM:132 - Machter group: the name of a column from OccurrencesTable +2016-04-07 14:45:49 DEBUG WPS2SM:133 - Machter start: 40 +2016-04-07 14:45:49 DEBUG WPS2SM:134 - Machter end: 82 +2016-04-07 14:45:49 DEBUG WPS2SM:135 - Machter Group Count: 1 +2016-04-07 14:45:49 DEBUG WPS2SM:137 - Matcher referredTabularParameterName: OccurrencesTable +2016-04-07 14:45:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The column containing longitude values [the name of a column from OccurrencesTable] +2016-04-07 14:45:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:LongitudeColumn +2016-04-07 14:45:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 14:45:49 DEBUG SClient4WPS:658 - InputParameter: Parameter [name=LongitudeColumn, description=The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallongitude], typology=COLUMN] +2016-04-07 14:45:49 DEBUG WPS2SM:254 - Conversion to SM Type->LatitudeColumn is a Literal Input +2016-04-07 14:45:49 DEBUG WPS2SM:93 - WPS type: +2016-04-07 14:45:49 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 14:45:49 DEBUG WPS2SM:101 - Guessed default value: decimallatitude +2016-04-07 14:45:49 DEBUG WPS2SM:130 - Machter title: The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallatitude] +2016-04-07 14:45:49 DEBUG WPS2SM:131 - Machter find: true +2016-04-07 14:45:49 DEBUG WPS2SM:132 - Machter group: the name of a column from OccurrencesTable +2016-04-07 14:45:49 DEBUG WPS2SM:133 - Machter start: 39 +2016-04-07 14:45:49 DEBUG WPS2SM:134 - Machter end: 81 +2016-04-07 14:45:49 DEBUG WPS2SM:135 - Machter Group Count: 1 +2016-04-07 14:45:49 DEBUG WPS2SM:137 - Matcher referredTabularParameterName: OccurrencesTable +2016-04-07 14:45:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The column containing latitude values [the name of a column from OccurrencesTable] +2016-04-07 14:45:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:LatitudeColumn +2016-04-07 14:45:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 14:45:49 DEBUG SClient4WPS:658 - InputParameter: Parameter [name=LatitudeColumn, description=The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallatitude], typology=COLUMN] +2016-04-07 14:45:49 DEBUG WPS2SM:254 - Conversion to SM Type->XResolution is a Literal Input +2016-04-07 14:45:49 DEBUG WPS2SM:93 - WPS type: +2016-04-07 14:45:49 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-07 14:45:49 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 14:45:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Model projection resolution on the X axis in decimal degrees +2016-04-07 14:45:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:XResolution +2016-04-07 14:45:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 14:45:49 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=XResolution, description=Model projection resolution on the X axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 14:45:49 DEBUG WPS2SM:254 - Conversion to SM Type->YResolution is a Literal Input +2016-04-07 14:45:49 DEBUG WPS2SM:93 - WPS type: +2016-04-07 14:45:49 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-07 14:45:49 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 14:45:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Model projection resolution on the Y axis in decimal degrees +2016-04-07 14:45:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:YResolution +2016-04-07 14:45:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 14:45:49 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=YResolution, description=Model projection resolution on the Y axis in decimal degrees [Min N. of Entries:1; Max N. 
of Entries:1; default:1], typology=OBJECT] +2016-04-07 14:45:49 DEBUG WPS2SM:254 - Conversion to SM Type->Layers is a Literal Input +2016-04-07 14:45:49 DEBUG WPS2SM:93 - WPS type: +2016-04-07 14:45:49 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 14:45:49 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 14:45:49 DEBUG WPS2SM:147 - Machter title: The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1] +2016-04-07 14:45:49 DEBUG WPS2SM:148 - Machter find: true +2016-04-07 14:45:49 DEBUG WPS2SM:149 - Machter group: a sequence of values separated by | +2016-04-07 14:45:49 DEBUG WPS2SM:150 - Machter start: 501 +2016-04-07 14:45:49 DEBUG WPS2SM:151 - Machter end: 536 +2016-04-07 14:45:49 DEBUG WPS2SM:152 - Machter Group Count: 1 +2016-04-07 14:45:49 DEBUG WPS2SM:155 - Matcher separator: | +2016-04-07 14:45:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. 
https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) +2016-04-07 14:45:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Layers +2016-04-07 14:45:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 14:45:49 DEBUG SClient4WPS:658 - InputParameter: ListParameter [type=java.lang.String, value=null, separator=|, name=Layers, description=The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1], typology=LIST] +2016-04-07 14:45:49 DEBUG WPS2SM:254 - Conversion to SM Type->Z is a Literal Input +2016-04-07 14:45:49 DEBUG WPS2SM:93 - WPS type: +2016-04-07 14:45:49 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-07 14:45:49 DEBUG WPS2SM:101 - Guessed default value: 0 +2016-04-07 14:45:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer +2016-04-07 14:45:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Z +2016-04-07 14:45:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 14:45:49 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=0, value=null, name=Z, description=Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer [Min N. 
of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT] +2016-04-07 14:45:49 DEBUG WPS2SM:254 - Conversion to SM Type->TimeIndex is a Literal Input +2016-04-07 14:45:49 DEBUG WPS2SM:93 - WPS type: +2016-04-07 14:45:49 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 14:45:49 DEBUG WPS2SM:101 - Guessed default value: 0 +2016-04-07 14:45:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Time Index. The default is the first time indexed in the input environmental datasets +2016-04-07 14:45:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:TimeIndex +2016-04-07 14:45:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 14:45:49 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex, description=Time Index. The default is the first time indexed in the input environmental datasets [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT] +2016-04-07 14:45:49 DEBUG SClient4WPS:662 - Parameters: [ObjectParameter [type=java.lang.String, defaultValue=maxent_, value=null, name=OutputTableLabel, description=The name of the table to produce [Min N. of Entries:1; Max N. of Entries:1; default:maxent_], typology=OBJECT], ObjectParameter [type=java.lang.String, defaultValue=generic_species, value=null, name=SpeciesName, description=The name of the species to model and the occurrence records refer to [Min N. of Entries:1; Max N. of Entries:1; default:generic_species], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1000, value=null, name=MaxIterations, description=The number of learning iterations of the MaxEnt algorithm [Min N. of Entries:1; Max N. of Entries:1; default:1000], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=0.5, value=null, name=DefaultPrevalence, description=A priori probability of presence at ordinary occurrence points [Min N. of Entries:1; Max N. 
of Entries:1; default:0.5], typology=OBJECT], TabularParameter [tableName= , templates=[], name=OccurrencesTable, description=A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=LongitudeColumn, description=The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallongitude], typology=COLUMN], Parameter [name=LatitudeColumn, description=The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallatitude], typology=COLUMN], ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=XResolution, description=Model projection resolution on the X axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=YResolution, description=Model projection resolution on the Y axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ListParameter [type=java.lang.String, value=null, separator=|, name=Layers, description=The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. 
of Entries:1], typology=LIST], ObjectParameter [type=java.lang.Double, defaultValue=0, value=null, name=Z, description=Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex, description=Time Index. The default is the first time indexed in the input environmental datasets [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT]] +2016-04-07 14:45:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 14:45:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 14:45:50 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 14:45:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 14:45:50 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 14:45:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 14:45:50 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 14:45:50 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 14:45:50 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 14:45:50 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 14:45:50 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 14:45:50 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 14:45:50 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 14:45:50 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 14:45:50 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 14:45:50 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 14:45:50 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 14:45:50 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 14:45:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-07 14:45:50 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 14:45:50 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 14:45:50 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-07 14:45:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 14:45:50 DEBUG ASLSession:458 - Getting security token: null in thread 35 
+2016-04-07 14:45:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 14:45:50 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 14:45:50 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 14:45:50 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 14:45:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 14:45:50 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 14:45:50 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 14:45:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 14:45:50 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 14:45:50 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 14:45:50 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 14:45:50 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 14:45:50 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 14:45:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 14:45:50 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 14:45:50 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 14:45:50 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 14:45:50 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 14:45:50 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 14:45:50 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 14:45:50 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 14:45:50 INFO HomeManageFactory:103 - 
getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 14:45:50 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 14:45:50 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 14:45:50 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 14:45:50 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 14:45:50 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 14:45:50 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 14:45:51 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 14:45:51 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 14:45:51 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 14:45:51 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 14:45:51 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 14:45:51 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 14:45:51 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 14:45:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 14:45:51 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 14:45:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 14:45:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 14:45:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 14:45:51 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 14:45:51 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 14:45:51 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 14:45:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 14:45:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 14:45:51 DEBUG 
JCRHomeManager:97 - User is already logged +2016-04-07 14:45:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 14:45:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 14:45:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 14:45:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 14:45:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 14:45:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 14:45:51 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 14:45:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 14:45:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 14:45:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 14:45:51 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 14:45:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 14:45:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 14:45:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 14:45:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 14:45:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 14:45:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 14:45:51 INFO JCRServlets:142 - Calling servlet getChildrenById 
efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 14:45:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 14:45:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 14:45:52 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 14:45:52 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 14:45:52 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 32 ms +2016-04-07 14:45:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 14:45:52 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 14:45:52 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-07 14:45:52 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and 
$resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-07 14:45:52 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-07 14:45:52 INFO ISClientConnector:82 - found only one RR, take it +2016-04-07 14:45:52 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-07 14:45:52 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-07 14:45:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 14:45:52 DEBUG StorageClient:517 - set scope: /gcube +2016-04-07 14:45:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 14:45:52 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 14:45:52 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 14:45:52 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-07 14:45:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 14:45:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 14:45:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 14:45:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 14:45:52 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 14:45:52 DEBUG ItemBuilder:108 - Is shared folder: 
Cotrix test +2016-04-07 14:45:52 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 14:45:52 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 14:45:52 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 14:45:52 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 14:45:52 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 14:45:52 INFO WorkspaceExplorerServiceImpl:142 - end time - 494 msc 0 sec +2016-04-07 14:45:52 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 14:46:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 14:46:39 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 14:47:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 14:47:34 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 14:48:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 14:48:29 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 14:49:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 14:49:24 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 14:50:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 14:50:19 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 14:51:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 14:51:14 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 14:52:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 14:52:09 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 14:53:04 DEBUG AccessLogger:124 - Creating a message handling object in order to 
handle the message queue +2016-04-07 14:53:04 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 14:53:04 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 14:53:04 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 14:53:04 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 14:53:04 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 14:53:04 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 14:53:04 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@26b3e75 +2016-04-07 14:53:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 14:53:04 INFO ASLSession:352 - Logging the entrance +2016-04-07 14:53:04 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 14:53:04 DEBUG TemplateModel:83 - 2016-04-07 14:53:04, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 14:53:04 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 14:53:04 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 14:53:08 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 14:53:08 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 14:53:08 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 14:53:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 14:53:08 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 14:53:08 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 14:53:08 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 14:53:08 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 113 ms +2016-04-07 14:53:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 14:53:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 14:53:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 14:53:08 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 14:53:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 14:53:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 14:53:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 14:53:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 14:53:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 14:53:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 14:53:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 14:53:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 14:53:08 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 14:53:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 14:53:08 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 14:53:08 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 14:53:08 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 14:53:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@473d9dc3 +2016-04-07 14:53:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@5f559db1 +2016-04-07 14:53:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@28b6f3be +2016-04-07 14:53:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@e316452 +2016-04-07 14:53:08 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 127 ms +2016-04-07 14:53:08 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 14:53:09 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 14:53:09 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 14:53:09 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 14:53:09 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 14:53:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 14:53:09 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 14:53:09 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 14:53:09 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 30 ms +2016-04-07 14:53:09 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 14:53:09 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 14:53:09 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 14:53:09 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 14:53:09 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 14:53:09 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. 
a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. 
If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 14:53:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 14:53:59 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 14:54:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 14:54:54 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 14:55:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 14:55:49 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 14:56:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 14:56:44 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 14:57:36 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 14:57:36 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 14:57:36 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 14:57:36 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 14:57:36 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 14:57:36 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 14:57:36 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 14:57:36 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@29dc6078 +2016-04-07 14:57:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 14:57:36 INFO ASLSession:352 - Logging the entrance +2016-04-07 14:57:36 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 14:57:36 DEBUG TemplateModel:83 - 2016-04-07 14:57:36, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 14:57:36 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 14:57:36 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 14:57:40 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 14:57:40 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 14:57:40 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 14:57:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 14:57:40 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 14:57:40 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 14:57:40 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 14:57:40 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 137 ms +2016-04-07 
14:57:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 14:57:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 14:57:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 14:57:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 14:57:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 14:57:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 14:57:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 14:57:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 14:57:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 14:57:41 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 14:57:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 14:57:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 14:57:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 14:57:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 14:57:41 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 14:57:41 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 14:57:41 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-07 14:57:41 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@6b58b016 +2016-04-07 14:57:41 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@1c218a9d +2016-04-07 14:57:41 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@1f7f5395 +2016-04-07 14:57:41 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@21f0133f +2016-04-07 14:57:41 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 112 ms +2016-04-07 14:57:41 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 14:57:41 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 14:57:41 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 14:57:41 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 14:57:41 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 14:57:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 14:57:41 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 14:57:41 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 14:57:41 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 30 ms +2016-04-07 14:57:41 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 14:57:41 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 14:57:41 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 14:57:41 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 14:57:42 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 14:57:42 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. 
a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. 
If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 14:58:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 14:58:31 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 14:59:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 14:59:26 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 15:00:47 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 15:00:47 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 15:00:47 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 15:00:47 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 15:00:47 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 15:00:47 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 15:00:47 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 15:00:47 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@1cdb889f +2016-04-07 15:00:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:00:47 INFO ASLSession:352 - Logging the entrance +2016-04-07 15:00:47 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 15:00:47 DEBUG TemplateModel:83 - 2016-04-07 15:00:47, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 15:00:47 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 15:00:47 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 15:00:51 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 15:00:51 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 15:00:51 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 15:00:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:00:51 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:00:51 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 15:00:51 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 15:00:51 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 115 ms +2016-04-07 
15:00:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 15:00:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 15:00:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 15:00:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 15:00:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 15:00:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 15:00:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 15:00:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 15:00:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 15:00:51 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 15:00:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 15:00:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 15:00:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 15:00:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 15:00:51 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 15:00:51 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 15:00:51 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-07 15:00:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@45f53cc0 +2016-04-07 15:00:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@4c19e8ca +2016-04-07 15:00:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@19f7c6ef +2016-04-07 15:00:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@574c29a3 +2016-04-07 15:00:51 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 120 ms +2016-04-07 15:00:51 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 15:00:52 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 15:00:52 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 15:00:52 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 15:00:52 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 15:00:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:00:52 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 15:00:52 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 15:00:52 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 32 ms +2016-04-07 15:00:52 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 15:00:52 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:00:52 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 15:00:52 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 15:00:52 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:00:52 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. 
a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. 
If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 15:01:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 15:01:42 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 15:02:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 15:02:37 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 15:03:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 15:03:04 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 15:03:04 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 15:03:04 INFO SClient4WPS:643 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-07 15:03:04 DEBUG SClient4WPS:276 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:03:04 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 15:03:04 DEBUG SClient4WPS:297 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 15:03:04 DEBUG SClient4WPS:301 - WPSClient->Fetching Inputs +2016-04-07 15:03:04 DEBUG SClient4WPS:303 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 15:03:04 DEBUG SClient4WPS:303 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 15:03:04 DEBUG SClient4WPS:303 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 15:03:04 DEBUG SClient4WPS:303 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-07 15:03:04 DEBUG SClient4WPS:303 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-07 15:03:04 DEBUG SClient4WPS:303 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-07 15:03:04 DEBUG SClient4WPS:303 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-07 15:03:04 DEBUG SClient4WPS:308 - WPSClient->Fetching Outputs +2016-04-07 15:03:04 DEBUG SClient4WPS:310 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 15:03:04 DEBUG SClient4WPS:310 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 15:03:04 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:03:04 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 15:03:04 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 15:03:04 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 15:03:04 DEBUG WPS2SM:201 - Schema: null +2016-04-07 15:03:04 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 15:03:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 15:03:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 15:03:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:03:04 DEBUG SClient4WPS:658 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 15:03:04 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 15:03:05 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:03:05 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:03:05 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 15:03:05 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-07 15:03:05 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 15:03:05 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 15:03:05 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 15:03:05 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 15:03:05 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 15:03:05 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 15:03:05 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 15:03:05 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 15:03:05 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 15:03:05 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:03:05 DEBUG SClient4WPS:658 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 15:03:05 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 15:03:05 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:03:05 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:03:05 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 15:03:05 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 15:03:05 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 15:03:05 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:03:05 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 15:03:05 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-07 15:03:05 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:03:05 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 15:03:05 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-07 15:03:05 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-07 15:03:05 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-07 15:03:05 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:03:05 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-07 15:03:05 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-07 15:03:05 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:03:05 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 15:03:05 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 15:03:05 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-07 15:03:05 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-07 15:03:05 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:03:05 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-07 15:03:05 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-07 15:03:05 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:03:05 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 15:03:05 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-07 15:03:05 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-07 15:03:05 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-07 15:03:05 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:03:05 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-07 15:03:05 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 15:03:05 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:03:05 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 15:03:05 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-07 15:03:05 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-07 15:03:05 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 15:03:05 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:03:05 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-07 15:03:05 DEBUG SClient4WPS:662 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-07 15:03:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 15:03:05 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 15:03:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:03:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 15:03:05 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:03:05 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:03:05 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:03:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:03:05 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:03:05 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:03:05 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 15:03:05 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 15:03:05 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 15:03:05 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 15:03:05 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 15:03:05 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 15:03:05 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 15:03:05 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 15:03:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-07 15:03:05 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 15:03:05 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 15:03:05 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-07 15:03:05 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 15:03:05 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 30 +2016-04-07 15:03:05 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:03:05 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 15:03:05 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:03:05 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 15:03:05 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:03:05 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:03:05 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:03:05 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 15:03:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 15:03:05 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 15:03:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 15:03:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 15:03:05 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 15:03:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 15:03:05 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:03:05 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:03:05 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:03:05 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:03:05 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:03:05 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:03:05 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:03:05 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:03:05 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 15:03:05 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 15:03:05 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 15:03:06 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 15:03:06 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 15:03:06 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 15:03:06 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 15:03:06 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:03:06 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:03:06 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:03:06 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:03:06 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:03:06 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:03:06 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:03:06 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:03:06 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 15:03:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:03:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:03:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:03:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-07 15:03:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:03:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:03:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 15:03:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:03:06 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:03:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 15:03:06 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 15:03:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:03:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:03:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:03:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:03:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:03:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:03:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:03:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:03:06 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 15:03:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-07 15:03:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 
15:03:06 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 15:03:06 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 15:03:06 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 35 ms +2016-04-07 15:03:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-07 15:03:06 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 15:03:06 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-07 15:03:06 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-07 15:03:06 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-07 15:03:06 INFO ISClientConnector:82 - found only one RR, take it +2016-04-07 15:03:06 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-07 15:03:06 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-07 15:03:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-07 15:03:06 DEBUG StorageClient:517 - set scope: /gcube +2016-04-07 15:03:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-07 15:03:06 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 15:03:06 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 15:03:06 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-07 15:03:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-07 15:03:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-07 15:03:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-07 15:03:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 15:03:06 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 15:03:06 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 15:03:06 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 15:03:06 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-07 15:03:06 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 15:03:06 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 15:03:06 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 15:03:06 INFO WorkspaceExplorerServiceImpl:142 - end time - 533 msc 0 sec +2016-04-07 15:03:06 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 15:03:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:03:32 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 15:04:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:04:27 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 15:05:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:05:22 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:06:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 15:06:17 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 15:07:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 15:07:12 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 15:08:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 15:08:07 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 15:08:47 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 15:08:47 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 15:08:47 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 15:08:47 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 15:08:47 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 15:08:47 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 15:08:47 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 15:08:47 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@a814763 +2016-04-07 15:08:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 15:08:47 INFO ASLSession:352 - Logging the entrance +2016-04-07 15:08:47 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 15:08:47 DEBUG TemplateModel:83 - 2016-04-07 15:08:47, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 15:08:47 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 15:08:47 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 15:08:50 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 15:08:50 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 15:08:50 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 15:08:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 15:08:50 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 15:08:50 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 15:08:50 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 15:08:50 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 139 ms +2016-04-07 15:08:50 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 15:08:50 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 15:08:50 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 15:08:50 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 15:08:50 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 15:08:50 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 15:08:50 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 15:08:50 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 15:08:50 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 15:08:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 15:08:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 15:08:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 15:08:51 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 15:08:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 15:08:51 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 15:08:51 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 15:08:51 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 15:08:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@49223fa3 +2016-04-07 15:08:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@56133d87 +2016-04-07 15:08:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@746aa93b +2016-04-07 15:08:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@4495f527 +2016-04-07 15:08:51 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 131 ms +2016-04-07 15:08:51 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 15:08:51 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 15:08:51 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 15:08:51 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 15:08:51 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 15:08:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 15:08:51 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 15:08:51 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 15:08:51 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 34 ms +2016-04-07 15:08:51 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 15:08:51 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:08:51 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 15:08:51 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 15:08:53 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:08:53 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. 
a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. 
If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 15:09:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:09:42 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:10:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 15:10:37 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 15:11:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 15:11:32 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 15:12:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:12:27 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:13:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 15:13:22 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 15:14:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:14:17 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 15:15:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:15:12 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 15:16:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:16:07 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:17:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:17:02 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:17:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 15:17:57 DEBUG ASLSession:458 - Getting 
security token: null in thread 36 +2016-04-07 15:18:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:18:52 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:22:53 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 15:22:53 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 15:22:53 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 15:22:53 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 15:22:53 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 15:22:53 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:22:53 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 15:22:53 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@7ed52a93 +2016-04-07 15:22:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:22:53 INFO ASLSession:352 - Logging the entrance +2016-04-07 15:22:53 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 15:22:53 DEBUG TemplateModel:83 - 2016-04-07 15:22:53, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 15:22:53 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 15:22:53 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 15:22:56 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. 
+2016-04-07 15:22:56 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 15:22:56 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 15:22:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 28 +2016-04-07 15:22:56 DEBUG ASLSession:458 - Getting security token: null in thread 28 +2016-04-07 15:22:56 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 15:22:57 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 15:22:57 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 146 ms +2016-04-07 15:22:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 15:22:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 15:22:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 15:22:57 INFO 
ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 15:22:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 15:22:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 15:22:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 15:22:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 15:22:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 15:22:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 15:22:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 15:22:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 15:22:57 INFO 
ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 15:22:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 15:22:57 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 15:22:57 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 15:22:57 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 15:22:57 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@6a43308d +2016-04-07 15:22:57 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@696770a8 +2016-04-07 15:22:57 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@7f2ad84f +2016-04-07 15:22:57 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@29a8dc62 +2016-04-07 15:22:57 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 93 ms +2016-04-07 15:22:57 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 15:22:57 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 15:22:57 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 15:22:57 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 15:22:57 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 15:22:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 28 +2016-04-07 15:22:57 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 15:22:57 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 15:22:57 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 30 ms +2016-04-07 15:22:57 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 15:22:57 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:22:57 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 15:22:57 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 15:22:58 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:22:58 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. 
a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. 
If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 15:23:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 28 +2016-04-07 15:23:48 DEBUG ASLSession:458 - Getting security token: null in thread 28 +2016-04-07 15:24:40 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 15:24:40 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 15:24:40 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 15:24:40 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 15:24:40 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 15:24:40 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 15:24:40 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 15:24:40 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@11bbd441 +2016-04-07 15:24:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 15:24:40 INFO ASLSession:352 - Logging the entrance +2016-04-07 15:24:40 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 15:24:40 DEBUG TemplateModel:83 - 2016-04-07 15:24:40, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 15:24:40 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 15:24:40 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 15:24:45 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 15:24:45 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 15:24:45 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 15:24:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 15:24:45 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 15:24:45 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 15:24:45 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 15:24:45 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 138 ms +2016-04-07 
15:24:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 15:24:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 15:24:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 15:24:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 15:24:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 15:24:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 15:24:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 15:24:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 15:24:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 15:24:45 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 15:24:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 15:24:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 15:24:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 15:24:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 15:24:45 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 15:24:46 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 15:24:46 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-07 15:24:46 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@2b61066a +2016-04-07 15:24:46 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@6810da0f +2016-04-07 15:24:46 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@5c78ee1b +2016-04-07 15:24:46 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@20d2f589 +2016-04-07 15:24:46 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 116 ms +2016-04-07 15:24:46 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 15:24:46 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 15:24:46 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 15:24:46 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 15:24:46 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 15:24:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 15:24:46 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 15:24:46 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 15:24:46 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 33 ms +2016-04-07 15:24:46 DEBUG SClient4WPS:116 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 15:24:46 INFO SClient4WPS:121 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:24:46 DEBUG SClient4WPS:132 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 15:24:46 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 15:24:47 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:24:47 DEBUG SClient4WPS:262 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. 
a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. 
If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. 
The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. 
It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. 
Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get..., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching al..., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precisio..., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 poin..., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold..., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for t..., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another enviro..., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statis..., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. 
Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each obse..., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data p..., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a p..., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. 
It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire..., description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken eith..., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and..., description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, 
name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this fo..., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a ce..., description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. 
Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samp..., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time ..., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure pa..., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is s..., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). 
A grid of points at a certain..., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatica..., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically search..., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 15:25:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 15:25:35 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 15:26:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 15:26:30 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 15:27:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 15:27:25 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 15:27:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:27:49 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 15:27:49 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 15:27:49 INFO SClient4WPS:643 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF +2016-04-07 15:27:49 DEBUG SClient4WPS:276 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:27:49 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 15:27:52 DEBUG SClient4WPS:297 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF + LOF + Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed. + + + PointsTable + Table containing points or observations. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + + + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + + + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + + + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + + + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + + + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. 
the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 15:27:52 DEBUG SClient4WPS:301 - WPSClient->Fetching Inputs +2016-04-07 15:27:52 DEBUG SClient4WPS:303 - WPSClient->Input: + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 15:27:52 DEBUG SClient4WPS:303 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + +2016-04-07 15:27:52 DEBUG SClient4WPS:303 - WPSClient->Input: + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. 
table name of the resulting distribution + + + + Cluster_ + + +2016-04-07 15:27:52 DEBUG SClient4WPS:303 - WPSClient->Input: + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + +2016-04-07 15:27:52 DEBUG SClient4WPS:303 - WPSClient->Input: + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + +2016-04-07 15:27:52 DEBUG SClient4WPS:303 - WPSClient->Input: + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + +2016-04-07 15:27:52 DEBUG SClient4WPS:303 - WPSClient->Input: + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + +2016-04-07 15:27:52 DEBUG SClient4WPS:308 - WPSClient->Fetching Outputs +2016-04-07 15:27:52 DEBUG SClient4WPS:310 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 15:27:52 DEBUG SClient4WPS:310 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 15:27:52 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:27:52 DEBUG WPS2SM:279 - Conversion to SM Type->PointsTable is a Complex Input +2016-04-07 15:27:52 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 15:27:52 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 15:27:52 DEBUG WPS2SM:201 - Schema: null +2016-04-07 15:27:52 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 15:27:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 15:27:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsTable +2016-04-07 15:27:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:27:52 DEBUG SClient4WPS:658 - InputParameter: TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-07 15:27:52 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 15:27:52 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:27:52 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:27:52 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 15:27:52 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-07 15:27:52 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 15:27:52 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from PointsTable separated by | +2016-04-07 15:27:52 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 15:27:52 DEBUG WPS2SM:115 - Machter end: 93 +2016-04-07 15:27:52 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 15:27:52 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: PointsTable +2016-04-07 15:27:52 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 15:27:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from PointsTable separated by | ] +2016-04-07 15:27:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 15:27:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:27:52 DEBUG SClient4WPS:658 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 15:27:52 DEBUG WPS2SM:254 - Conversion to SM Type->PointsClusterLabel is a Literal Input +2016-04-07 15:27:52 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:27:52 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:27:52 DEBUG WPS2SM:101 - Guessed default value: Cluster_ +2016-04-07 15:27:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 15:27:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsClusterLabel +2016-04-07 15:27:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:27:52 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT] +2016-04-07 15:27:52 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_lower_bound is a Literal Input +2016-04-07 15:27:52 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:27:52 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 15:27:52 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-07 15:27:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:locality (usually called k): minimal number of nearest neighbors +2016-04-07 15:27:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_lower_bound +2016-04-07 15:27:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:27:52 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. of Entries:1; Max N. 
of Entries:1; default:2], typology=OBJECT] +2016-04-07 15:27:52 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_upper_bound is a Literal Input +2016-04-07 15:27:52 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:27:52 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 15:27:52 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 15:27:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of nearest neighbors to take into account for outliers evaluation +2016-04-07 15:27:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_upper_bound +2016-04-07 15:27:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:27:52 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-07 15:27:52 DEBUG WPS2SM:254 - Conversion to SM Type->distance_function is a Literal Input +2016-04-07 15:27:52 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:27:52 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:27:52 DEBUG WPS2SM:101 - Guessed default value: euclidian distance +2016-04-07 15:27:52 DEBUG WPS2SM:265 - ValueType[]:[euclidian distance, squared distance, cosine distance, inverted cosine distance, angle] +2016-04-07 15:27:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the distance function to use in the calculation +2016-04-07 15:27:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:distance_function +2016-04-07 15:27:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:27:52 DEBUG SClient4WPS:658 - InputParameter: Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. 
of Entries:1; default:euclidian distance], typology=ENUM] +2016-04-07 15:27:52 DEBUG WPS2SM:254 - Conversion to SM Type->lof_threshold is a Literal Input +2016-04-07 15:27:52 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:27:52 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 15:27:52 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-07 15:27:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the LOF score threshold over which the point is an outlier (usually 2) +2016-04-07 15:27:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:lof_threshold +2016-04-07 15:27:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:27:52 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-07 15:27:52 DEBUG SClient4WPS:662 - Parameters: [TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. 
of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. of Entries:1; default:euclidian distance], typology=ENUM], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-07 15:27:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:27:52 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:27:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:27:52 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:27:52 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:27:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 15:27:52 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 15:27:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 15:27:52 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:27:52 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:27:52 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 15:27:52 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 15:27:52 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 15:27:52 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 15:27:52 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 15:27:52 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 15:27:52 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 15:27:52 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 15:27:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-07 15:27:53 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 15:27:53 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 15:27:53 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 96 ms +2016-04-07 15:27:53 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 15:27:53 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:27:53 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:27:53 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:27:53 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 15:27:53 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 15:27:53 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:27:53 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:27:53 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:27:53 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 15:27:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 15:27:53 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 15:27:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 15:27:53 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:27:53 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:27:53 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:27:53 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:27:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:27:53 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 15:27:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:27:53 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:27:53 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:27:53 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:27:53 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:27:53 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 15:27:54 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 15:27:54 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 15:27:54 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 15:27:54 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 15:27:54 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 15:27:54 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 15:27:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:27:54 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:27:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:27:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:27:54 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:27:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:27:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:27:54 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:27:54 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:27:54 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 15:27:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:27:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:27:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-07 15:27:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 15:27:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:27:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:27:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:27:54 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 15:27:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:27:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:27:54 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:27:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:27:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:27:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:27:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:27:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:27:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:27:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:27:54 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 15:27:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-07 15:27:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 
15:27:54 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 15:27:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 15:27:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 70 ms +2016-04-07 15:27:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-07 15:27:54 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 15:27:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-07 15:27:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 15 ms +2016-04-07 15:27:54 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-07 15:27:54 INFO ISClientConnector:82 - found only one RR, take it +2016-04-07 15:27:54 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-07 15:27:54 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-07 15:27:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-07 15:27:54 DEBUG StorageClient:517 - set scope: /gcube +2016-04-07 15:27:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-07 15:27:54 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 15:27:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 15:27:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-07 15:27:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-07 15:27:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-07 15:27:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-07 15:27:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 15:27:54 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 15:27:54 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 15:27:54 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 15:27:54 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-07 15:27:54 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 15:27:54 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 15:27:54 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 15:27:54 INFO WorkspaceExplorerServiceImpl:142 - end time - 500 msc 0 sec +2016-04-07 15:27:54 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 15:28:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:28:20 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 15:28:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 15:28:27 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 15:28:27 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 15:28:27 INFO SClient4WPS:643 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING +2016-04-07 15:28:27 DEBUG SClient4WPS:276 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:28:27 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 15:28:28 DEBUG SClient4WPS:297 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING + MAX_ENT_NICHE_MODELLING + A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. 
The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt + + + OutputTableLabel + The name of the table to produce + Name of the parameter: OutputTableLabel. The name of the table to produce + + + + maxent_ + + + + SpeciesName + The name of the species to model and the occurrence records refer to + Name of the parameter: SpeciesName. The name of the species to model and the occurrence records refer to + + + + generic_species + + + + MaxIterations + The number of learning iterations of the MaxEnt algorithm + Name of the parameter: MaxIterations. The number of learning iterations of the MaxEnt algorithm + + + + 1000 + + + + DefaultPrevalence + A priori probability of presence at ordinary occurrence points + Name of the parameter: DefaultPrevalence. A priori probability of presence at ordinary occurrence points + + + + 0.5 + + + + OccurrencesTable + A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + Name of the parameter: OccurrencesTable. 
A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + LongitudeColumn + The column containing longitude values [the name of a column from OccurrencesTable] + Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrencesTable] + + + + decimallongitude + + + + LatitudeColumn + The column containing latitude values [the name of a column from OccurrencesTable] + Name of the parameter: LatitudeColumn. The column containing latitude values [the name of a column from OccurrencesTable] + + + + decimallatitude + + + + XResolution + Model projection resolution on the X axis in decimal degrees + Name of the parameter: XResolution. Model projection resolution on the X axis in decimal degrees + + + + 1 + + + + YResolution + Model projection resolution on the Y axis in decimal degrees + Name of the parameter: YResolution. Model projection resolution on the Y axis in decimal degrees + + + + 1 + + + + Layers + The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. 
the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + + + + + + + Z + Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + Name of the parameter: Z. Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + + + + 0 + + + + TimeIndex + Time Index. The default is the first time indexed in the input environmental datasets + Name of the parameter: TimeIndex. Time Index. The default is the first time indexed in the input environmental datasets + + + + 0 + + + + + + Best Threshold + Best threshold for transforming MaxEnt values into 0/1 probability assignments + Name of the parameter: Best Threshold. Best threshold for transforming MaxEnt values into 0/1 probability assignments + + + + + + Estimated Prevalence + The a posteriori estimated prevalence of the species + Name of the parameter: Estimated Prevalence. The a posteriori estimated prevalence of the species + + + + + + Variables contributions + The contribution of each variable to the MaxEnt values estimates + Name of the parameter: Variables contributions. The contribution of each variable to the MaxEnt values estimates + + + + + + Variables Permutations Importance + The importance of the permutations of the variables during the training + Name of the parameter: Variables Permutations Importance. 
The importance of the permutations of the variables during the training + + + + + + ASCII Maps of the environmental layers for checking features aligments + ASCII Maps of the environmental layers for checking features aligments + Name of the parameter: ASCII Maps of the environmental layers for checking features aligments. ASCII Maps of the environmental layers for checking features aligments + + + + application/d4science + + + + + application/d4science + + + + + + OutputTable7 + Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OutputTable7. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 15:28:28 DEBUG SClient4WPS:301 - WPSClient->Fetching Inputs +2016-04-07 15:28:28 DEBUG SClient4WPS:303 - WPSClient->Input: + OutputTableLabel + The name of the table to produce + Name of the parameter: OutputTableLabel. The name of the table to produce + + + + maxent_ + + +2016-04-07 15:28:28 DEBUG SClient4WPS:303 - WPSClient->Input: + SpeciesName + The name of the species to model and the occurrence records refer to + Name of the parameter: SpeciesName. The name of the species to model and the occurrence records refer to + + + + generic_species + + +2016-04-07 15:28:28 DEBUG SClient4WPS:303 - WPSClient->Input: + MaxIterations + The number of learning iterations of the MaxEnt algorithm + Name of the parameter: MaxIterations. 
The number of learning iterations of the MaxEnt algorithm + + + + 1000 + + +2016-04-07 15:28:28 DEBUG SClient4WPS:303 - WPSClient->Input: + DefaultPrevalence + A priori probability of presence at ordinary occurrence points + Name of the parameter: DefaultPrevalence. A priori probability of presence at ordinary occurrence points + + + + 0.5 + + +2016-04-07 15:28:28 DEBUG SClient4WPS:303 - WPSClient->Input: + OccurrencesTable + A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + Name of the parameter: OccurrencesTable. A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 15:28:28 DEBUG SClient4WPS:303 - WPSClient->Input: + LongitudeColumn + The column containing longitude values [the name of a column from OccurrencesTable] + Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrencesTable] + + + + decimallongitude + + +2016-04-07 15:28:28 DEBUG SClient4WPS:303 - WPSClient->Input: + LatitudeColumn + The column containing latitude values [the name of a column from OccurrencesTable] + Name of the parameter: LatitudeColumn. The column containing latitude values [the name of a column from OccurrencesTable] + + + + decimallatitude + + +2016-04-07 15:28:28 DEBUG SClient4WPS:303 - WPSClient->Input: + XResolution + Model projection resolution on the X axis in decimal degrees + Name of the parameter: XResolution. 
Model projection resolution on the X axis in decimal degrees + + + + 1 + + +2016-04-07 15:28:28 DEBUG SClient4WPS:303 - WPSClient->Input: + YResolution + Model projection resolution on the Y axis in decimal degrees + Name of the parameter: YResolution. Model projection resolution on the Y axis in decimal degrees + + + + 1 + + +2016-04-07 15:28:28 DEBUG SClient4WPS:303 - WPSClient->Input: + Layers + The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + + + + + +2016-04-07 15:28:28 DEBUG SClient4WPS:303 - WPSClient->Input: + Z + Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + Name of the parameter: Z. Value of Z. 
Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + + + + 0 + + +2016-04-07 15:28:28 DEBUG SClient4WPS:303 - WPSClient->Input: + TimeIndex + Time Index. The default is the first time indexed in the input environmental datasets + Name of the parameter: TimeIndex. Time Index. The default is the first time indexed in the input environmental datasets + + + + 0 + + +2016-04-07 15:28:28 DEBUG SClient4WPS:308 - WPSClient->Fetching Outputs +2016-04-07 15:28:28 DEBUG SClient4WPS:310 - WPSClient->Output: + Best Threshold + Best threshold for transforming MaxEnt values into 0/1 probability assignments + Name of the parameter: Best Threshold. Best threshold for transforming MaxEnt values into 0/1 probability assignments + + + + +2016-04-07 15:28:28 DEBUG SClient4WPS:310 - WPSClient->Output: + Estimated Prevalence + The a posteriori estimated prevalence of the species + Name of the parameter: Estimated Prevalence. The a posteriori estimated prevalence of the species + + + + +2016-04-07 15:28:28 DEBUG SClient4WPS:310 - WPSClient->Output: + Variables contributions + The contribution of each variable to the MaxEnt values estimates + Name of the parameter: Variables contributions. The contribution of each variable to the MaxEnt values estimates + + + + +2016-04-07 15:28:28 DEBUG SClient4WPS:310 - WPSClient->Output: + Variables Permutations Importance + The importance of the permutations of the variables during the training + Name of the parameter: Variables Permutations Importance. The importance of the permutations of the variables during the training + + + + +2016-04-07 15:28:28 DEBUG SClient4WPS:310 - WPSClient->Output: + ASCII Maps of the environmental layers for checking features aligments + ASCII Maps of the environmental layers for checking features aligments + Name of the parameter: ASCII Maps of the environmental layers for checking features aligments. 
ASCII Maps of the environmental layers for checking features aligments + + + + application/d4science + + + + + application/d4science + + + + +2016-04-07 15:28:28 DEBUG SClient4WPS:310 - WPSClient->Output: + OutputTable7 + Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OutputTable7. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 15:28:28 DEBUG SClient4WPS:310 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 15:28:28 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:28:28 DEBUG WPS2SM:254 - Conversion to SM Type->OutputTableLabel is a Literal Input +2016-04-07 15:28:28 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:28:28 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:28:28 DEBUG WPS2SM:101 - Guessed default value: maxent_ +2016-04-07 15:28:28 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The name of the table to produce +2016-04-07 15:28:28 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OutputTableLabel +2016-04-07 15:28:28 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:28:28 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=maxent_, value=null, name=OutputTableLabel, description=The name of the table to produce [Min N. of Entries:1; Max N. 
of Entries:1; default:maxent_], typology=OBJECT] +2016-04-07 15:28:28 DEBUG WPS2SM:254 - Conversion to SM Type->SpeciesName is a Literal Input +2016-04-07 15:28:28 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:28:28 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:28:28 DEBUG WPS2SM:101 - Guessed default value: generic_species +2016-04-07 15:28:28 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The name of the species to model and the occurrence records refer to +2016-04-07 15:28:28 DEBUG WPS2SM:291 - Conversion to SM Type->Name:SpeciesName +2016-04-07 15:28:28 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:28:28 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=generic_species, value=null, name=SpeciesName, description=The name of the species to model and the occurrence records refer to [Min N. of Entries:1; Max N. of Entries:1; default:generic_species], typology=OBJECT] +2016-04-07 15:28:28 DEBUG WPS2SM:254 - Conversion to SM Type->MaxIterations is a Literal Input +2016-04-07 15:28:28 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:28:28 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 15:28:28 DEBUG WPS2SM:101 - Guessed default value: 1000 +2016-04-07 15:28:28 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The number of learning iterations of the MaxEnt algorithm +2016-04-07 15:28:28 DEBUG WPS2SM:291 - Conversion to SM Type->Name:MaxIterations +2016-04-07 15:28:28 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:28:28 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1000, value=null, name=MaxIterations, description=The number of learning iterations of the MaxEnt algorithm [Min N. of Entries:1; Max N. 
of Entries:1; default:1000], typology=OBJECT] +2016-04-07 15:28:28 DEBUG WPS2SM:254 - Conversion to SM Type->DefaultPrevalence is a Literal Input +2016-04-07 15:28:28 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:28:28 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-07 15:28:28 DEBUG WPS2SM:101 - Guessed default value: 0.5 +2016-04-07 15:28:28 DEBUG WPS2SM:290 - Conversion to SM Type->Title:A priori probability of presence at ordinary occurrence points +2016-04-07 15:28:28 DEBUG WPS2SM:291 - Conversion to SM Type->Name:DefaultPrevalence +2016-04-07 15:28:28 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:28:28 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=0.5, value=null, name=DefaultPrevalence, description=A priori probability of presence at ordinary occurrence points [Min N. of Entries:1; Max N. of Entries:1; default:0.5], typology=OBJECT] +2016-04-07 15:28:28 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencesTable is a Complex Input +2016-04-07 15:28:28 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 15:28:28 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 15:28:28 DEBUG WPS2SM:201 - Schema: null +2016-04-07 15:28:28 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 15:28:28 DEBUG WPS2SM:290 - Conversion to SM Type->Title:A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] +2016-04-07 15:28:28 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencesTable +2016-04-07 15:28:28 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:28:28 DEBUG SClient4WPS:658 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencesTable, description=A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets 
[a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 15:28:28 DEBUG WPS2SM:254 - Conversion to SM Type->LongitudeColumn is a Literal Input +2016-04-07 15:28:28 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:28:28 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:28:28 DEBUG WPS2SM:101 - Guessed default value: decimallongitude +2016-04-07 15:28:28 DEBUG WPS2SM:130 - Machter title: The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallongitude] +2016-04-07 15:28:28 DEBUG WPS2SM:131 - Machter find: true +2016-04-07 15:28:28 DEBUG WPS2SM:132 - Machter group: the name of a column from OccurrencesTable +2016-04-07 15:28:28 DEBUG WPS2SM:133 - Machter start: 40 +2016-04-07 15:28:28 DEBUG WPS2SM:134 - Machter end: 82 +2016-04-07 15:28:28 DEBUG WPS2SM:135 - Machter Group Count: 1 +2016-04-07 15:28:28 DEBUG WPS2SM:137 - Matcher referredTabularParameterName: OccurrencesTable +2016-04-07 15:28:28 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The column containing longitude values [the name of a column from OccurrencesTable] +2016-04-07 15:28:28 DEBUG WPS2SM:291 - Conversion to SM Type->Name:LongitudeColumn +2016-04-07 15:28:28 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:28:28 DEBUG SClient4WPS:658 - InputParameter: Parameter [name=LongitudeColumn, description=The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallongitude], typology=COLUMN] +2016-04-07 15:28:28 DEBUG WPS2SM:254 - Conversion to SM Type->LatitudeColumn is a Literal Input +2016-04-07 15:28:28 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:28:28 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:28:28 DEBUG WPS2SM:101 - Guessed default value: decimallatitude +2016-04-07 15:28:28 DEBUG WPS2SM:130 - Machter title: The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallatitude] +2016-04-07 15:28:28 DEBUG WPS2SM:131 - Machter find: true +2016-04-07 15:28:28 DEBUG WPS2SM:132 - Machter group: the name of a column from OccurrencesTable +2016-04-07 15:28:28 DEBUG WPS2SM:133 - Machter start: 39 +2016-04-07 15:28:28 DEBUG WPS2SM:134 - Machter end: 81 +2016-04-07 15:28:28 DEBUG WPS2SM:135 - Machter Group Count: 1 +2016-04-07 15:28:28 DEBUG WPS2SM:137 - Matcher referredTabularParameterName: OccurrencesTable +2016-04-07 15:28:28 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The column containing latitude values [the name of a column from OccurrencesTable] +2016-04-07 15:28:28 DEBUG WPS2SM:291 - Conversion to SM Type->Name:LatitudeColumn +2016-04-07 15:28:28 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:28:28 DEBUG SClient4WPS:658 - InputParameter: Parameter [name=LatitudeColumn, description=The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallatitude], typology=COLUMN] +2016-04-07 15:28:28 DEBUG WPS2SM:254 - Conversion to SM Type->XResolution is a Literal Input +2016-04-07 15:28:28 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:28:28 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-07 15:28:28 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 15:28:28 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Model projection resolution on the X axis in decimal degrees +2016-04-07 15:28:28 DEBUG WPS2SM:291 - Conversion to SM Type->Name:XResolution +2016-04-07 15:28:28 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:28:28 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=XResolution, description=Model projection resolution on the X axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 15:28:28 DEBUG WPS2SM:254 - Conversion to SM Type->YResolution is a Literal Input +2016-04-07 15:28:28 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:28:28 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-07 15:28:28 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 15:28:28 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Model projection resolution on the Y axis in decimal degrees +2016-04-07 15:28:28 DEBUG WPS2SM:291 - Conversion to SM Type->Name:YResolution +2016-04-07 15:28:28 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:28:28 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=YResolution, description=Model projection resolution on the Y axis in decimal degrees [Min N. of Entries:1; Max N. 
of Entries:1; default:1], typology=OBJECT] +2016-04-07 15:28:28 DEBUG WPS2SM:254 - Conversion to SM Type->Layers is a Literal Input +2016-04-07 15:28:28 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:28:28 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:28:28 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 15:28:28 DEBUG WPS2SM:147 - Machter title: The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1] +2016-04-07 15:28:28 DEBUG WPS2SM:148 - Machter find: true +2016-04-07 15:28:28 DEBUG WPS2SM:149 - Machter group: a sequence of values separated by | +2016-04-07 15:28:28 DEBUG WPS2SM:150 - Machter start: 501 +2016-04-07 15:28:28 DEBUG WPS2SM:151 - Machter end: 536 +2016-04-07 15:28:28 DEBUG WPS2SM:152 - Machter Group Count: 1 +2016-04-07 15:28:28 DEBUG WPS2SM:155 - Matcher separator: | +2016-04-07 15:28:28 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. 
https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) +2016-04-07 15:28:28 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Layers +2016-04-07 15:28:28 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:28:28 DEBUG SClient4WPS:658 - InputParameter: ListParameter [type=java.lang.String, value=null, separator=|, name=Layers, description=The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1], typology=LIST] +2016-04-07 15:28:28 DEBUG WPS2SM:254 - Conversion to SM Type->Z is a Literal Input +2016-04-07 15:28:28 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:28:28 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-07 15:28:28 DEBUG WPS2SM:101 - Guessed default value: 0 +2016-04-07 15:28:28 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer +2016-04-07 15:28:28 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Z +2016-04-07 15:28:28 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:28:28 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=0, value=null, name=Z, description=Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer [Min N. 
of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT] +2016-04-07 15:28:28 DEBUG WPS2SM:254 - Conversion to SM Type->TimeIndex is a Literal Input +2016-04-07 15:28:28 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:28:28 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 15:28:28 DEBUG WPS2SM:101 - Guessed default value: 0 +2016-04-07 15:28:28 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Time Index. The default is the first time indexed in the input environmental datasets +2016-04-07 15:28:28 DEBUG WPS2SM:291 - Conversion to SM Type->Name:TimeIndex +2016-04-07 15:28:28 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:28:28 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex, description=Time Index. The default is the first time indexed in the input environmental datasets [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT] +2016-04-07 15:28:28 DEBUG SClient4WPS:662 - Parameters: [ObjectParameter [type=java.lang.String, defaultValue=maxent_, value=null, name=OutputTableLabel, description=The name of the table to produce [Min N. of Entries:1; Max N. of Entries:1; default:maxent_], typology=OBJECT], ObjectParameter [type=java.lang.String, defaultValue=generic_species, value=null, name=SpeciesName, description=The name of the species to model and the occurrence records refer to [Min N. of Entries:1; Max N. of Entries:1; default:generic_species], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1000, value=null, name=MaxIterations, description=The number of learning iterations of the MaxEnt algorithm [Min N. of Entries:1; Max N. of Entries:1; default:1000], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=0.5, value=null, name=DefaultPrevalence, description=A priori probability of presence at ordinary occurrence points [Min N. of Entries:1; Max N. 
of Entries:1; default:0.5], typology=OBJECT], TabularParameter [tableName= , templates=[], name=OccurrencesTable, description=A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=LongitudeColumn, description=The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallongitude], typology=COLUMN], Parameter [name=LatitudeColumn, description=The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallatitude], typology=COLUMN], ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=XResolution, description=Model projection resolution on the X axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=YResolution, description=Model projection resolution on the Y axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ListParameter [type=java.lang.String, value=null, separator=|, name=Layers, description=The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. 
of Entries:1], typology=LIST], ObjectParameter [type=java.lang.Double, defaultValue=0, value=null, name=Z, description=Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex, description=Time Index. The default is the first time indexed in the input environmental datasets [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT]] +2016-04-07 15:28:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:28:28 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 15:28:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:28:28 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:28:28 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:28:28 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:28:28 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:28:28 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:28:28 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:28:28 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:28:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 15:28:28 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 15:28:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 15:28:28 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:28:28 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:28:28 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:28:28 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:28:28 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:28:28 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:28:28 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 15:28:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 15:28:28 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 15:28:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 15:28:28 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:28:28 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:28:28 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:28:28 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:28:28 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:28:28 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:28:28 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:28:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 15:28:28 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 15:28:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 15:28:28 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:28:28 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:28:28 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:28:28 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:28:28 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:28:28 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:28:28 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:28:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:28:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 15:28:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:28:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:28:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 15:28:28 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 15:28:28 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:28:28 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:28:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:28:28 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 15:28:28 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:28:28 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:28:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:28:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath 
/Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:28:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:28:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:28:28 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 15:28:28 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 15:28:28 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 15:28:28 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 15:28:28 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 15:28:28 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 15:28:28 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 15:28:28 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 15:28:28 INFO WorkspaceExplorerServiceImpl:142 - end time - 258 msc 0 sec +2016-04-07 15:28:28 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 15:28:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 15:28:55 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 15:28:55 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 15:28:55 INFO SClient4WPS:643 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING +2016-04-07 15:28:55 DEBUG SClient4WPS:276 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:28:55 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 15:28:55 DEBUG SClient4WPS:297 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING 
+ MAX_ENT_NICHE_MODELLING + A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt + + + OutputTableLabel + The name of the table to produce + Name of the parameter: OutputTableLabel. The name of the table to produce + + + + maxent_ + + + + SpeciesName + The name of the species to model and the occurrence records refer to + Name of the parameter: SpeciesName. The name of the species to model and the occurrence records refer to + + + + generic_species + + + + MaxIterations + The number of learning iterations of the MaxEnt algorithm + Name of the parameter: MaxIterations. The number of learning iterations of the MaxEnt algorithm + + + + 1000 + + + + DefaultPrevalence + A priori probability of presence at ordinary occurrence points + Name of the parameter: DefaultPrevalence. 
A priori probability of presence at ordinary occurrence points + + + + 0.5 + + + + OccurrencesTable + A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + Name of the parameter: OccurrencesTable. A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + LongitudeColumn + The column containing longitude values [the name of a column from OccurrencesTable] + Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrencesTable] + + + + decimallongitude + + + + LatitudeColumn + The column containing latitude values [the name of a column from OccurrencesTable] + Name of the parameter: LatitudeColumn. The column containing latitude values [the name of a column from OccurrencesTable] + + + + decimallatitude + + + + XResolution + Model projection resolution on the X axis in decimal degrees + Name of the parameter: XResolution. Model projection resolution on the X axis in decimal degrees + + + + 1 + + + + YResolution + Model projection resolution on the Y axis in decimal degrees + Name of the parameter: YResolution. Model projection resolution on the Y axis in decimal degrees + + + + 1 + + + + Layers + The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. 
Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + + + + + + + Z + Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + Name of the parameter: Z. Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + + + + 0 + + + + TimeIndex + Time Index. The default is the first time indexed in the input environmental datasets + Name of the parameter: TimeIndex. Time Index. The default is the first time indexed in the input environmental datasets + + + + 0 + + + + + + Best Threshold + Best threshold for transforming MaxEnt values into 0/1 probability assignments + Name of the parameter: Best Threshold. Best threshold for transforming MaxEnt values into 0/1 probability assignments + + + + + + Estimated Prevalence + The a posteriori estimated prevalence of the species + Name of the parameter: Estimated Prevalence. The a posteriori estimated prevalence of the species + + + + + + Variables contributions + The contribution of each variable to the MaxEnt values estimates + Name of the parameter: Variables contributions. 
The contribution of each variable to the MaxEnt values estimates + + + + + + Variables Permutations Importance + The importance of the permutations of the variables during the training + Name of the parameter: Variables Permutations Importance. The importance of the permutations of the variables during the training + + + + + + ASCII Maps of the environmental layers for checking features aligments + ASCII Maps of the environmental layers for checking features aligments + Name of the parameter: ASCII Maps of the environmental layers for checking features aligments. ASCII Maps of the environmental layers for checking features aligments + + + + application/d4science + + + + + application/d4science + + + + + + OutputTable7 + Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OutputTable7. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 15:28:55 DEBUG SClient4WPS:301 - WPSClient->Fetching Inputs +2016-04-07 15:28:55 DEBUG SClient4WPS:303 - WPSClient->Input: + OutputTableLabel + The name of the table to produce + Name of the parameter: OutputTableLabel. The name of the table to produce + + + + maxent_ + + +2016-04-07 15:28:55 DEBUG SClient4WPS:303 - WPSClient->Input: + SpeciesName + The name of the species to model and the occurrence records refer to + Name of the parameter: SpeciesName. 
The name of the species to model and the occurrence records refer to + + + + generic_species + + +2016-04-07 15:28:55 DEBUG SClient4WPS:303 - WPSClient->Input: + MaxIterations + The number of learning iterations of the MaxEnt algorithm + Name of the parameter: MaxIterations. The number of learning iterations of the MaxEnt algorithm + + + + 1000 + + +2016-04-07 15:28:55 DEBUG SClient4WPS:303 - WPSClient->Input: + DefaultPrevalence + A priori probability of presence at ordinary occurrence points + Name of the parameter: DefaultPrevalence. A priori probability of presence at ordinary occurrence points + + + + 0.5 + + +2016-04-07 15:28:55 DEBUG SClient4WPS:303 - WPSClient->Input: + OccurrencesTable + A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + Name of the parameter: OccurrencesTable. A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 15:28:55 DEBUG SClient4WPS:303 - WPSClient->Input: + LongitudeColumn + The column containing longitude values [the name of a column from OccurrencesTable] + Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrencesTable] + + + + decimallongitude + + +2016-04-07 15:28:55 DEBUG SClient4WPS:303 - WPSClient->Input: + LatitudeColumn + The column containing latitude values [the name of a column from OccurrencesTable] + Name of the parameter: LatitudeColumn. 
The column containing latitude values [the name of a column from OccurrencesTable] + + + + decimallatitude + + +2016-04-07 15:28:55 DEBUG SClient4WPS:303 - WPSClient->Input: + XResolution + Model projection resolution on the X axis in decimal degrees + Name of the parameter: XResolution. Model projection resolution on the X axis in decimal degrees + + + + 1 + + +2016-04-07 15:28:55 DEBUG SClient4WPS:303 - WPSClient->Input: + YResolution + Model projection resolution on the Y axis in decimal degrees + Name of the parameter: YResolution. Model projection resolution on the Y axis in decimal degrees + + + + 1 + + +2016-04-07 15:28:55 DEBUG SClient4WPS:303 - WPSClient->Input: + Layers + The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + + + + + +2016-04-07 15:28:55 DEBUG SClient4WPS:303 - WPSClient->Input: + Z + Value of Z. 
Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + Name of the parameter: Z. Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + + + + 0 + + +2016-04-07 15:28:55 DEBUG SClient4WPS:303 - WPSClient->Input: + TimeIndex + Time Index. The default is the first time indexed in the input environmental datasets + Name of the parameter: TimeIndex. Time Index. The default is the first time indexed in the input environmental datasets + + + + 0 + + +2016-04-07 15:28:55 DEBUG SClient4WPS:308 - WPSClient->Fetching Outputs +2016-04-07 15:28:55 DEBUG SClient4WPS:310 - WPSClient->Output: + Best Threshold + Best threshold for transforming MaxEnt values into 0/1 probability assignments + Name of the parameter: Best Threshold. Best threshold for transforming MaxEnt values into 0/1 probability assignments + + + + +2016-04-07 15:28:55 DEBUG SClient4WPS:310 - WPSClient->Output: + Estimated Prevalence + The a posteriori estimated prevalence of the species + Name of the parameter: Estimated Prevalence. The a posteriori estimated prevalence of the species + + + + +2016-04-07 15:28:55 DEBUG SClient4WPS:310 - WPSClient->Output: + Variables contributions + The contribution of each variable to the MaxEnt values estimates + Name of the parameter: Variables contributions. The contribution of each variable to the MaxEnt values estimates + + + + +2016-04-07 15:28:55 DEBUG SClient4WPS:310 - WPSClient->Output: + Variables Permutations Importance + The importance of the permutations of the variables during the training + Name of the parameter: Variables Permutations Importance. 
The importance of the permutations of the variables during the training + + + + +2016-04-07 15:28:55 DEBUG SClient4WPS:310 - WPSClient->Output: + ASCII Maps of the environmental layers for checking features aligments + ASCII Maps of the environmental layers for checking features aligments + Name of the parameter: ASCII Maps of the environmental layers for checking features aligments. ASCII Maps of the environmental layers for checking features aligments + + + + application/d4science + + + + + application/d4science + + + + +2016-04-07 15:28:55 DEBUG SClient4WPS:310 - WPSClient->Output: + OutputTable7 + Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OutputTable7. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 15:28:55 DEBUG SClient4WPS:310 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 15:28:55 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:28:55 DEBUG WPS2SM:254 - Conversion to SM Type->OutputTableLabel is a Literal Input +2016-04-07 15:28:55 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:28:55 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:28:55 DEBUG WPS2SM:101 - Guessed default value: maxent_ +2016-04-07 15:28:55 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The name of the table to produce +2016-04-07 15:28:55 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OutputTableLabel +2016-04-07 15:28:55 DEBUG WPS2SM:292 - Conversion to SM 
Type->Number of Inputs to Manage:1 +2016-04-07 15:28:55 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=maxent_, value=null, name=OutputTableLabel, description=The name of the table to produce [Min N. of Entries:1; Max N. of Entries:1; default:maxent_], typology=OBJECT] +2016-04-07 15:28:55 DEBUG WPS2SM:254 - Conversion to SM Type->SpeciesName is a Literal Input +2016-04-07 15:28:55 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:28:55 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:28:55 DEBUG WPS2SM:101 - Guessed default value: generic_species +2016-04-07 15:28:55 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The name of the species to model and the occurrence records refer to +2016-04-07 15:28:55 DEBUG WPS2SM:291 - Conversion to SM Type->Name:SpeciesName +2016-04-07 15:28:55 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:28:55 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=generic_species, value=null, name=SpeciesName, description=The name of the species to model and the occurrence records refer to [Min N. of Entries:1; Max N. 
of Entries:1; default:generic_species], typology=OBJECT] +2016-04-07 15:28:55 DEBUG WPS2SM:254 - Conversion to SM Type->MaxIterations is a Literal Input +2016-04-07 15:28:55 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:28:55 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 15:28:55 DEBUG WPS2SM:101 - Guessed default value: 1000 +2016-04-07 15:28:55 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The number of learning iterations of the MaxEnt algorithm +2016-04-07 15:28:55 DEBUG WPS2SM:291 - Conversion to SM Type->Name:MaxIterations +2016-04-07 15:28:55 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:28:55 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1000, value=null, name=MaxIterations, description=The number of learning iterations of the MaxEnt algorithm [Min N. of Entries:1; Max N. of Entries:1; default:1000], typology=OBJECT] +2016-04-07 15:28:55 DEBUG WPS2SM:254 - Conversion to SM Type->DefaultPrevalence is a Literal Input +2016-04-07 15:28:55 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:28:55 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-07 15:28:55 DEBUG WPS2SM:101 - Guessed default value: 0.5 +2016-04-07 15:28:55 DEBUG WPS2SM:290 - Conversion to SM Type->Title:A priori probability of presence at ordinary occurrence points +2016-04-07 15:28:55 DEBUG WPS2SM:291 - Conversion to SM Type->Name:DefaultPrevalence +2016-04-07 15:28:55 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:28:55 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=0.5, value=null, name=DefaultPrevalence, description=A priori probability of presence at ordinary occurrence points [Min N. of Entries:1; Max N. 
of Entries:1; default:0.5], typology=OBJECT] +2016-04-07 15:28:55 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencesTable is a Complex Input +2016-04-07 15:28:55 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 15:28:55 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 15:28:55 DEBUG WPS2SM:201 - Schema: null +2016-04-07 15:28:55 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 15:28:55 DEBUG WPS2SM:290 - Conversion to SM Type->Title:A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] +2016-04-07 15:28:55 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencesTable +2016-04-07 15:28:55 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:28:55 DEBUG SClient4WPS:658 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencesTable, description=A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 15:28:55 DEBUG WPS2SM:254 - Conversion to SM Type->LongitudeColumn is a Literal Input +2016-04-07 15:28:55 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:28:55 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:28:55 DEBUG WPS2SM:101 - Guessed default value: decimallongitude +2016-04-07 15:28:55 DEBUG WPS2SM:130 - Machter title: The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallongitude] +2016-04-07 15:28:55 DEBUG WPS2SM:131 - Machter find: true +2016-04-07 15:28:55 DEBUG WPS2SM:132 - Machter group: the name of a column from OccurrencesTable +2016-04-07 15:28:55 DEBUG WPS2SM:133 - Machter start: 40 +2016-04-07 15:28:55 DEBUG WPS2SM:134 - Machter end: 82 +2016-04-07 15:28:55 DEBUG WPS2SM:135 - Machter Group Count: 1 +2016-04-07 15:28:55 DEBUG WPS2SM:137 - Matcher referredTabularParameterName: OccurrencesTable +2016-04-07 15:28:55 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The column containing longitude values [the name of a column from OccurrencesTable] +2016-04-07 15:28:55 DEBUG WPS2SM:291 - Conversion to SM Type->Name:LongitudeColumn +2016-04-07 15:28:55 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:28:55 DEBUG SClient4WPS:658 - InputParameter: Parameter [name=LongitudeColumn, description=The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallongitude], typology=COLUMN] +2016-04-07 15:28:55 DEBUG WPS2SM:254 - Conversion to SM Type->LatitudeColumn is a Literal Input +2016-04-07 15:28:55 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:28:55 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:28:55 DEBUG WPS2SM:101 - Guessed default value: decimallatitude +2016-04-07 15:28:55 DEBUG WPS2SM:130 - Machter title: The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallatitude] +2016-04-07 15:28:55 DEBUG WPS2SM:131 - Machter find: true +2016-04-07 15:28:55 DEBUG WPS2SM:132 - Machter group: the name of a column from OccurrencesTable +2016-04-07 15:28:55 DEBUG WPS2SM:133 - Machter start: 39 +2016-04-07 15:28:55 DEBUG WPS2SM:134 - Machter end: 81 +2016-04-07 15:28:55 DEBUG WPS2SM:135 - Machter Group Count: 1 +2016-04-07 15:28:55 DEBUG WPS2SM:137 - Matcher referredTabularParameterName: OccurrencesTable +2016-04-07 15:28:55 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The column containing latitude values [the name of a column from OccurrencesTable] +2016-04-07 15:28:55 DEBUG WPS2SM:291 - Conversion to SM Type->Name:LatitudeColumn +2016-04-07 15:28:55 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:28:55 DEBUG SClient4WPS:658 - InputParameter: Parameter [name=LatitudeColumn, description=The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallatitude], typology=COLUMN] +2016-04-07 15:28:55 DEBUG WPS2SM:254 - Conversion to SM Type->XResolution is a Literal Input +2016-04-07 15:28:55 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:28:55 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-07 15:28:55 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 15:28:55 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Model projection resolution on the X axis in decimal degrees +2016-04-07 15:28:55 DEBUG WPS2SM:291 - Conversion to SM Type->Name:XResolution +2016-04-07 15:28:55 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:28:55 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=XResolution, description=Model projection resolution on the X axis in decimal degrees [Min N. of Entries:1; Max N. 
of Entries:1; default:1], typology=OBJECT] +2016-04-07 15:28:55 DEBUG WPS2SM:254 - Conversion to SM Type->YResolution is a Literal Input +2016-04-07 15:28:55 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:28:55 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-07 15:28:55 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 15:28:55 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Model projection resolution on the Y axis in decimal degrees +2016-04-07 15:28:55 DEBUG WPS2SM:291 - Conversion to SM Type->Name:YResolution +2016-04-07 15:28:55 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:28:55 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=YResolution, description=Model projection resolution on the Y axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 15:28:55 DEBUG WPS2SM:254 - Conversion to SM Type->Layers is a Literal Input +2016-04-07 15:28:55 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:28:55 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:28:55 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 15:28:55 DEBUG WPS2SM:147 - Machter title: The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-07 15:28:55 DEBUG WPS2SM:148 - Machter find: true +2016-04-07 15:28:55 DEBUG WPS2SM:149 - Machter group: a sequence of values separated by | +2016-04-07 15:28:55 DEBUG WPS2SM:150 - Machter start: 501 +2016-04-07 15:28:55 DEBUG WPS2SM:151 - Machter end: 536 +2016-04-07 15:28:55 DEBUG WPS2SM:152 - Machter Group Count: 1 +2016-04-07 15:28:55 DEBUG WPS2SM:155 - Matcher separator: | +2016-04-07 15:28:55 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) +2016-04-07 15:28:55 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Layers +2016-04-07 15:28:55 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:28:55 DEBUG SClient4WPS:658 - InputParameter: ListParameter [type=java.lang.String, value=null, separator=|, name=Layers, description=The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. 
of Entries:1], typology=LIST] +2016-04-07 15:28:55 DEBUG WPS2SM:254 - Conversion to SM Type->Z is a Literal Input +2016-04-07 15:28:55 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:28:55 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-07 15:28:55 DEBUG WPS2SM:101 - Guessed default value: 0 +2016-04-07 15:28:55 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer +2016-04-07 15:28:55 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Z +2016-04-07 15:28:55 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:28:55 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=0, value=null, name=Z, description=Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT] +2016-04-07 15:28:55 DEBUG WPS2SM:254 - Conversion to SM Type->TimeIndex is a Literal Input +2016-04-07 15:28:55 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:28:55 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 15:28:55 DEBUG WPS2SM:101 - Guessed default value: 0 +2016-04-07 15:28:55 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Time Index. The default is the first time indexed in the input environmental datasets +2016-04-07 15:28:55 DEBUG WPS2SM:291 - Conversion to SM Type->Name:TimeIndex +2016-04-07 15:28:55 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:28:55 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex, description=Time Index. The default is the first time indexed in the input environmental datasets [Min N. of Entries:1; Max N. 
of Entries:1; default:0], typology=OBJECT] +2016-04-07 15:28:55 DEBUG SClient4WPS:662 - Parameters: [ObjectParameter [type=java.lang.String, defaultValue=maxent_, value=null, name=OutputTableLabel, description=The name of the table to produce [Min N. of Entries:1; Max N. of Entries:1; default:maxent_], typology=OBJECT], ObjectParameter [type=java.lang.String, defaultValue=generic_species, value=null, name=SpeciesName, description=The name of the species to model and the occurrence records refer to [Min N. of Entries:1; Max N. of Entries:1; default:generic_species], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1000, value=null, name=MaxIterations, description=The number of learning iterations of the MaxEnt algorithm [Min N. of Entries:1; Max N. of Entries:1; default:1000], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=0.5, value=null, name=DefaultPrevalence, description=A priori probability of presence at ordinary occurrence points [Min N. of Entries:1; Max N. of Entries:1; default:0.5], typology=OBJECT], TabularParameter [tableName= , templates=[], name=OccurrencesTable, description=A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=LongitudeColumn, description=The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallongitude], typology=COLUMN], Parameter [name=LatitudeColumn, description=The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallatitude], typology=COLUMN], ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=XResolution, description=Model projection resolution on the X axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=YResolution, description=Model projection resolution on the Y axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ListParameter [type=java.lang.String, value=null, separator=|, name=Layers, description=The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1], typology=LIST], ObjectParameter [type=java.lang.Double, defaultValue=0, value=null, name=Z, description=Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex, description=Time Index. The default is the first time indexed in the input environmental datasets [Min N. of Entries:1; Max N. 
of Entries:1; default:0], typology=OBJECT]] +2016-04-07 15:28:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 15:28:55 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 15:28:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 15:28:55 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:28:55 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:28:55 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:28:55 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:28:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:28:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:28:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:28:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:28:55 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:28:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:28:55 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:28:55 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:28:55 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:28:55 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:28:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:28:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:28:55 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 15:28:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:28:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 15:28:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 15:28:56 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 15:28:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:28:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:28:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:28:56 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 15:28:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:28:56 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:28:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:28:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:28:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:28:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:28:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:28:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:28:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:28:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 15:28:56 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 15:28:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 15:28:56 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:28:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:28:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:28:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:28:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:28:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:28:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:28:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:28:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:28:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:28:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:28:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 15:28:56 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 15:28:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:28:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:28:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:28:56 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 15:28:56 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 15:28:56 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 15:28:56 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 15:28:56 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 
15:28:56 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 15:28:56 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 15:28:56 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 15:28:56 INFO WorkspaceExplorerServiceImpl:142 - end time - 194 msc 0 sec +2016-04-07 15:28:56 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 15:29:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:29:02 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:29:02 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 15:29:02 INFO SClient4WPS:643 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING +2016-04-07 15:29:02 DEBUG SClient4WPS:276 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:29:02 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 15:29:02 DEBUG SClient4WPS:297 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING + MAX_ENT_NICHE_MODELLING + A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt + + + OutputTableLabel + The name of the table to produce + Name of the parameter: OutputTableLabel. The name of the table to produce + + + + maxent_ + + + + SpeciesName + The name of the species to model and the occurrence records refer to + Name of the parameter: SpeciesName. The name of the species to model and the occurrence records refer to + + + + generic_species + + + + MaxIterations + The number of learning iterations of the MaxEnt algorithm + Name of the parameter: MaxIterations. The number of learning iterations of the MaxEnt algorithm + + + + 1000 + + + + DefaultPrevalence + A priori probability of presence at ordinary occurrence points + Name of the parameter: DefaultPrevalence. A priori probability of presence at ordinary occurrence points + + + + 0.5 + + + + OccurrencesTable + A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + Name of the parameter: OccurrencesTable. 
A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + LongitudeColumn + The column containing longitude values [the name of a column from OccurrencesTable] + Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrencesTable] + + + + decimallongitude + + + + LatitudeColumn + The column containing latitude values [the name of a column from OccurrencesTable] + Name of the parameter: LatitudeColumn. The column containing latitude values [the name of a column from OccurrencesTable] + + + + decimallatitude + + + + XResolution + Model projection resolution on the X axis in decimal degrees + Name of the parameter: XResolution. Model projection resolution on the X axis in decimal degrees + + + + 1 + + + + YResolution + Model projection resolution on the Y axis in decimal degrees + Name of the parameter: YResolution. Model projection resolution on the Y axis in decimal degrees + + + + 1 + + + + Layers + The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. 
the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + + + + + + + Z + Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + Name of the parameter: Z. Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + + + + 0 + + + + TimeIndex + Time Index. The default is the first time indexed in the input environmental datasets + Name of the parameter: TimeIndex. Time Index. The default is the first time indexed in the input environmental datasets + + + + 0 + + + + + + Best Threshold + Best threshold for transforming MaxEnt values into 0/1 probability assignments + Name of the parameter: Best Threshold. Best threshold for transforming MaxEnt values into 0/1 probability assignments + + + + + + Estimated Prevalence + The a posteriori estimated prevalence of the species + Name of the parameter: Estimated Prevalence. The a posteriori estimated prevalence of the species + + + + + + Variables contributions + The contribution of each variable to the MaxEnt values estimates + Name of the parameter: Variables contributions. The contribution of each variable to the MaxEnt values estimates + + + + + + Variables Permutations Importance + The importance of the permutations of the variables during the training + Name of the parameter: Variables Permutations Importance. 
The importance of the permutations of the variables during the training + + + + + + ASCII Maps of the environmental layers for checking features aligments + ASCII Maps of the environmental layers for checking features aligments + Name of the parameter: ASCII Maps of the environmental layers for checking features aligments. ASCII Maps of the environmental layers for checking features aligments + + + + application/d4science + + + + + application/d4science + + + + + + OutputTable7 + Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OutputTable7. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 15:29:02 DEBUG SClient4WPS:301 - WPSClient->Fetching Inputs +2016-04-07 15:29:02 DEBUG SClient4WPS:303 - WPSClient->Input: + OutputTableLabel + The name of the table to produce + Name of the parameter: OutputTableLabel. The name of the table to produce + + + + maxent_ + + +2016-04-07 15:29:02 DEBUG SClient4WPS:303 - WPSClient->Input: + SpeciesName + The name of the species to model and the occurrence records refer to + Name of the parameter: SpeciesName. The name of the species to model and the occurrence records refer to + + + + generic_species + + +2016-04-07 15:29:02 DEBUG SClient4WPS:303 - WPSClient->Input: + MaxIterations + The number of learning iterations of the MaxEnt algorithm + Name of the parameter: MaxIterations. 
The number of learning iterations of the MaxEnt algorithm + + + + 1000 + + +2016-04-07 15:29:02 DEBUG SClient4WPS:303 - WPSClient->Input: + DefaultPrevalence + A priori probability of presence at ordinary occurrence points + Name of the parameter: DefaultPrevalence. A priori probability of presence at ordinary occurrence points + + + + 0.5 + + +2016-04-07 15:29:02 DEBUG SClient4WPS:303 - WPSClient->Input: + OccurrencesTable + A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + Name of the parameter: OccurrencesTable. A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 15:29:02 DEBUG SClient4WPS:303 - WPSClient->Input: + LongitudeColumn + The column containing longitude values [the name of a column from OccurrencesTable] + Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrencesTable] + + + + decimallongitude + + +2016-04-07 15:29:02 DEBUG SClient4WPS:303 - WPSClient->Input: + LatitudeColumn + The column containing latitude values [the name of a column from OccurrencesTable] + Name of the parameter: LatitudeColumn. The column containing latitude values [the name of a column from OccurrencesTable] + + + + decimallatitude + + +2016-04-07 15:29:02 DEBUG SClient4WPS:303 - WPSClient->Input: + XResolution + Model projection resolution on the X axis in decimal degrees + Name of the parameter: XResolution. 
Model projection resolution on the X axis in decimal degrees + + + + 1 + + +2016-04-07 15:29:02 DEBUG SClient4WPS:303 - WPSClient->Input: + YResolution + Model projection resolution on the Y axis in decimal degrees + Name of the parameter: YResolution. Model projection resolution on the Y axis in decimal degrees + + + + 1 + + +2016-04-07 15:29:02 DEBUG SClient4WPS:303 - WPSClient->Input: + Layers + The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + + + + + +2016-04-07 15:29:02 DEBUG SClient4WPS:303 - WPSClient->Input: + Z + Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + Name of the parameter: Z. Value of Z. 
Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + + + + 0 + + +2016-04-07 15:29:02 DEBUG SClient4WPS:303 - WPSClient->Input: + TimeIndex + Time Index. The default is the first time indexed in the input environmental datasets + Name of the parameter: TimeIndex. Time Index. The default is the first time indexed in the input environmental datasets + + + + 0 + + +2016-04-07 15:29:02 DEBUG SClient4WPS:308 - WPSClient->Fetching Outputs +2016-04-07 15:29:02 DEBUG SClient4WPS:310 - WPSClient->Output: + Best Threshold + Best threshold for transforming MaxEnt values into 0/1 probability assignments + Name of the parameter: Best Threshold. Best threshold for transforming MaxEnt values into 0/1 probability assignments + + + + +2016-04-07 15:29:02 DEBUG SClient4WPS:310 - WPSClient->Output: + Estimated Prevalence + The a posteriori estimated prevalence of the species + Name of the parameter: Estimated Prevalence. The a posteriori estimated prevalence of the species + + + + +2016-04-07 15:29:02 DEBUG SClient4WPS:310 - WPSClient->Output: + Variables contributions + The contribution of each variable to the MaxEnt values estimates + Name of the parameter: Variables contributions. The contribution of each variable to the MaxEnt values estimates + + + + +2016-04-07 15:29:02 DEBUG SClient4WPS:310 - WPSClient->Output: + Variables Permutations Importance + The importance of the permutations of the variables during the training + Name of the parameter: Variables Permutations Importance. The importance of the permutations of the variables during the training + + + + +2016-04-07 15:29:02 DEBUG SClient4WPS:310 - WPSClient->Output: + ASCII Maps of the environmental layers for checking features aligments + ASCII Maps of the environmental layers for checking features aligments + Name of the parameter: ASCII Maps of the environmental layers for checking features aligments. 
ASCII Maps of the environmental layers for checking features aligments + + + + application/d4science + + + + + application/d4science + + + + +2016-04-07 15:29:02 DEBUG SClient4WPS:310 - WPSClient->Output: + OutputTable7 + Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OutputTable7. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 15:29:02 DEBUG SClient4WPS:310 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 15:29:02 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:29:02 DEBUG WPS2SM:254 - Conversion to SM Type->OutputTableLabel is a Literal Input +2016-04-07 15:29:02 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:29:02 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:29:02 DEBUG WPS2SM:101 - Guessed default value: maxent_ +2016-04-07 15:29:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The name of the table to produce +2016-04-07 15:29:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OutputTableLabel +2016-04-07 15:29:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:02 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=maxent_, value=null, name=OutputTableLabel, description=The name of the table to produce [Min N. of Entries:1; Max N. 
of Entries:1; default:maxent_], typology=OBJECT] +2016-04-07 15:29:02 DEBUG WPS2SM:254 - Conversion to SM Type->SpeciesName is a Literal Input +2016-04-07 15:29:02 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:29:02 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:29:02 DEBUG WPS2SM:101 - Guessed default value: generic_species +2016-04-07 15:29:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The name of the species to model and the occurrence records refer to +2016-04-07 15:29:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:SpeciesName +2016-04-07 15:29:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:02 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=generic_species, value=null, name=SpeciesName, description=The name of the species to model and the occurrence records refer to [Min N. of Entries:1; Max N. of Entries:1; default:generic_species], typology=OBJECT] +2016-04-07 15:29:02 DEBUG WPS2SM:254 - Conversion to SM Type->MaxIterations is a Literal Input +2016-04-07 15:29:02 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:29:02 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 15:29:02 DEBUG WPS2SM:101 - Guessed default value: 1000 +2016-04-07 15:29:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The number of learning iterations of the MaxEnt algorithm +2016-04-07 15:29:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:MaxIterations +2016-04-07 15:29:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:02 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1000, value=null, name=MaxIterations, description=The number of learning iterations of the MaxEnt algorithm [Min N. of Entries:1; Max N. 
of Entries:1; default:1000], typology=OBJECT] +2016-04-07 15:29:02 DEBUG WPS2SM:254 - Conversion to SM Type->DefaultPrevalence is a Literal Input +2016-04-07 15:29:02 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:29:02 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-07 15:29:02 DEBUG WPS2SM:101 - Guessed default value: 0.5 +2016-04-07 15:29:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:A priori probability of presence at ordinary occurrence points +2016-04-07 15:29:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:DefaultPrevalence +2016-04-07 15:29:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:02 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=0.5, value=null, name=DefaultPrevalence, description=A priori probability of presence at ordinary occurrence points [Min N. of Entries:1; Max N. of Entries:1; default:0.5], typology=OBJECT] +2016-04-07 15:29:02 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencesTable is a Complex Input +2016-04-07 15:29:02 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 15:29:02 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 15:29:02 DEBUG WPS2SM:201 - Schema: null +2016-04-07 15:29:02 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 15:29:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] +2016-04-07 15:29:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencesTable +2016-04-07 15:29:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:02 DEBUG SClient4WPS:658 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencesTable, description=A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets 
[a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 15:29:02 DEBUG WPS2SM:254 - Conversion to SM Type->LongitudeColumn is a Literal Input +2016-04-07 15:29:02 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:29:02 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:29:02 DEBUG WPS2SM:101 - Guessed default value: decimallongitude +2016-04-07 15:29:02 DEBUG WPS2SM:130 - Machter title: The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallongitude] +2016-04-07 15:29:02 DEBUG WPS2SM:131 - Machter find: true +2016-04-07 15:29:02 DEBUG WPS2SM:132 - Machter group: the name of a column from OccurrencesTable +2016-04-07 15:29:02 DEBUG WPS2SM:133 - Machter start: 40 +2016-04-07 15:29:02 DEBUG WPS2SM:134 - Machter end: 82 +2016-04-07 15:29:02 DEBUG WPS2SM:135 - Machter Group Count: 1 +2016-04-07 15:29:02 DEBUG WPS2SM:137 - Matcher referredTabularParameterName: OccurrencesTable +2016-04-07 15:29:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The column containing longitude values [the name of a column from OccurrencesTable] +2016-04-07 15:29:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:LongitudeColumn +2016-04-07 15:29:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:02 DEBUG SClient4WPS:658 - InputParameter: Parameter [name=LongitudeColumn, description=The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallongitude], typology=COLUMN] +2016-04-07 15:29:02 DEBUG WPS2SM:254 - Conversion to SM Type->LatitudeColumn is a Literal Input +2016-04-07 15:29:02 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:29:02 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:29:02 DEBUG WPS2SM:101 - Guessed default value: decimallatitude +2016-04-07 15:29:02 DEBUG WPS2SM:130 - Machter title: The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallatitude] +2016-04-07 15:29:02 DEBUG WPS2SM:131 - Machter find: true +2016-04-07 15:29:02 DEBUG WPS2SM:132 - Machter group: the name of a column from OccurrencesTable +2016-04-07 15:29:02 DEBUG WPS2SM:133 - Machter start: 39 +2016-04-07 15:29:02 DEBUG WPS2SM:134 - Machter end: 81 +2016-04-07 15:29:02 DEBUG WPS2SM:135 - Machter Group Count: 1 +2016-04-07 15:29:02 DEBUG WPS2SM:137 - Matcher referredTabularParameterName: OccurrencesTable +2016-04-07 15:29:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The column containing latitude values [the name of a column from OccurrencesTable] +2016-04-07 15:29:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:LatitudeColumn +2016-04-07 15:29:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:02 DEBUG SClient4WPS:658 - InputParameter: Parameter [name=LatitudeColumn, description=The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallatitude], typology=COLUMN] +2016-04-07 15:29:02 DEBUG WPS2SM:254 - Conversion to SM Type->XResolution is a Literal Input +2016-04-07 15:29:02 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:29:02 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-07 15:29:02 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 15:29:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Model projection resolution on the X axis in decimal degrees +2016-04-07 15:29:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:XResolution +2016-04-07 15:29:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:02 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=XResolution, description=Model projection resolution on the X axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 15:29:02 DEBUG WPS2SM:254 - Conversion to SM Type->YResolution is a Literal Input +2016-04-07 15:29:02 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:29:02 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-07 15:29:02 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 15:29:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Model projection resolution on the Y axis in decimal degrees +2016-04-07 15:29:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:YResolution +2016-04-07 15:29:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:02 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=YResolution, description=Model projection resolution on the Y axis in decimal degrees [Min N. of Entries:1; Max N. 
of Entries:1; default:1], typology=OBJECT] +2016-04-07 15:29:02 DEBUG WPS2SM:254 - Conversion to SM Type->Layers is a Literal Input +2016-04-07 15:29:02 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:29:02 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:29:02 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 15:29:02 DEBUG WPS2SM:147 - Machter title: The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1] +2016-04-07 15:29:02 DEBUG WPS2SM:148 - Machter find: true +2016-04-07 15:29:02 DEBUG WPS2SM:149 - Machter group: a sequence of values separated by | +2016-04-07 15:29:02 DEBUG WPS2SM:150 - Machter start: 501 +2016-04-07 15:29:02 DEBUG WPS2SM:151 - Machter end: 536 +2016-04-07 15:29:02 DEBUG WPS2SM:152 - Machter Group Count: 1 +2016-04-07 15:29:02 DEBUG WPS2SM:155 - Matcher separator: | +2016-04-07 15:29:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. 
https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) +2016-04-07 15:29:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Layers +2016-04-07 15:29:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:02 DEBUG SClient4WPS:658 - InputParameter: ListParameter [type=java.lang.String, value=null, separator=|, name=Layers, description=The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1], typology=LIST] +2016-04-07 15:29:02 DEBUG WPS2SM:254 - Conversion to SM Type->Z is a Literal Input +2016-04-07 15:29:02 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:29:02 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-07 15:29:02 DEBUG WPS2SM:101 - Guessed default value: 0 +2016-04-07 15:29:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer +2016-04-07 15:29:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Z +2016-04-07 15:29:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:02 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=0, value=null, name=Z, description=Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer [Min N. 
of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT] +2016-04-07 15:29:02 DEBUG WPS2SM:254 - Conversion to SM Type->TimeIndex is a Literal Input +2016-04-07 15:29:02 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:29:02 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 15:29:02 DEBUG WPS2SM:101 - Guessed default value: 0 +2016-04-07 15:29:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Time Index. The default is the first time indexed in the input environmental datasets +2016-04-07 15:29:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:TimeIndex +2016-04-07 15:29:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:02 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex, description=Time Index. The default is the first time indexed in the input environmental datasets [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT] +2016-04-07 15:29:02 DEBUG SClient4WPS:662 - Parameters: [ObjectParameter [type=java.lang.String, defaultValue=maxent_, value=null, name=OutputTableLabel, description=The name of the table to produce [Min N. of Entries:1; Max N. of Entries:1; default:maxent_], typology=OBJECT], ObjectParameter [type=java.lang.String, defaultValue=generic_species, value=null, name=SpeciesName, description=The name of the species to model and the occurrence records refer to [Min N. of Entries:1; Max N. of Entries:1; default:generic_species], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1000, value=null, name=MaxIterations, description=The number of learning iterations of the MaxEnt algorithm [Min N. of Entries:1; Max N. of Entries:1; default:1000], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=0.5, value=null, name=DefaultPrevalence, description=A priori probability of presence at ordinary occurrence points [Min N. of Entries:1; Max N. 
of Entries:1; default:0.5], typology=OBJECT], TabularParameter [tableName= , templates=[], name=OccurrencesTable, description=A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=LongitudeColumn, description=The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallongitude], typology=COLUMN], Parameter [name=LatitudeColumn, description=The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallatitude], typology=COLUMN], ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=XResolution, description=Model projection resolution on the X axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=YResolution, description=Model projection resolution on the Y axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ListParameter [type=java.lang.String, value=null, separator=|, name=Layers, description=The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. 
of Entries:1], typology=LIST], ObjectParameter [type=java.lang.Double, defaultValue=0, value=null, name=Z, description=Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex, description=Time Index. The default is the first time indexed in the input environmental datasets [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT]] +2016-04-07 15:29:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:29:03 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 15:29:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:29:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:29:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:29:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:29:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:29:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:29:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:29:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:29:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 15:29:03 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 15:29:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 15:29:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:29:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:29:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:29:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:29:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:29:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:29:03 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 15:29:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:29:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 15:29:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 15:29:03 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 15:29:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 15:29:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:29:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:29:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:29:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:29:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:29:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:29:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:29:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:29:03 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 15:29:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 15:29:03 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 15:29:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 15:29:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:29:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:29:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:29:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:29:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:29:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:29:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:29:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:29:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:29:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:29:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:29:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:29:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:29:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:29:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 15:29:03 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 15:29:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:29:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:29:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:29:03 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 
15:29:03 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 15:29:03 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 15:29:03 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 15:29:03 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 15:29:03 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 15:29:03 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 15:29:03 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 15:29:03 INFO WorkspaceExplorerServiceImpl:142 - end time - 230 msc 0 sec +2016-04-07 15:29:03 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 15:29:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 15:29:11 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 15:29:11 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 15:29:11 INFO SClient4WPS:643 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING +2016-04-07 15:29:11 DEBUG SClient4WPS:276 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:29:11 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 15:29:12 DEBUG SClient4WPS:297 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING + MAX_ENT_NICHE_MODELLING + A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. 
The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt + + + OutputTableLabel + The name of the table to produce + Name of the parameter: OutputTableLabel. The name of the table to produce + + + + maxent_ + + + + SpeciesName + The name of the species to model and the occurrence records refer to + Name of the parameter: SpeciesName. The name of the species to model and the occurrence records refer to + + + + generic_species + + + + MaxIterations + The number of learning iterations of the MaxEnt algorithm + Name of the parameter: MaxIterations. The number of learning iterations of the MaxEnt algorithm + + + + 1000 + + + + DefaultPrevalence + A priori probability of presence at ordinary occurrence points + Name of the parameter: DefaultPrevalence. A priori probability of presence at ordinary occurrence points + + + + 0.5 + + + + OccurrencesTable + A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + Name of the parameter: OccurrencesTable. 
A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + LongitudeColumn + The column containing longitude values [the name of a column from OccurrencesTable] + Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrencesTable] + + + + decimallongitude + + + + LatitudeColumn + The column containing latitude values [the name of a column from OccurrencesTable] + Name of the parameter: LatitudeColumn. The column containing latitude values [the name of a column from OccurrencesTable] + + + + decimallatitude + + + + XResolution + Model projection resolution on the X axis in decimal degrees + Name of the parameter: XResolution. Model projection resolution on the X axis in decimal degrees + + + + 1 + + + + YResolution + Model projection resolution on the Y axis in decimal degrees + Name of the parameter: YResolution. Model projection resolution on the Y axis in decimal degrees + + + + 1 + + + + Layers + The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. 
the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + + + + + + + Z + Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + Name of the parameter: Z. Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + + + + 0 + + + + TimeIndex + Time Index. The default is the first time indexed in the input environmental datasets + Name of the parameter: TimeIndex. Time Index. The default is the first time indexed in the input environmental datasets + + + + 0 + + + + + + Best Threshold + Best threshold for transforming MaxEnt values into 0/1 probability assignments + Name of the parameter: Best Threshold. Best threshold for transforming MaxEnt values into 0/1 probability assignments + + + + + + Estimated Prevalence + The a posteriori estimated prevalence of the species + Name of the parameter: Estimated Prevalence. The a posteriori estimated prevalence of the species + + + + + + Variables contributions + The contribution of each variable to the MaxEnt values estimates + Name of the parameter: Variables contributions. The contribution of each variable to the MaxEnt values estimates + + + + + + Variables Permutations Importance + The importance of the permutations of the variables during the training + Name of the parameter: Variables Permutations Importance. 
The importance of the permutations of the variables during the training + + + + + + ASCII Maps of the environmental layers for checking features aligments + ASCII Maps of the environmental layers for checking features aligments + Name of the parameter: ASCII Maps of the environmental layers for checking features aligments. ASCII Maps of the environmental layers for checking features aligments + + + + application/d4science + + + + + application/d4science + + + + + + OutputTable7 + Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OutputTable7. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 15:29:12 DEBUG SClient4WPS:301 - WPSClient->Fetching Inputs +2016-04-07 15:29:12 DEBUG SClient4WPS:303 - WPSClient->Input: + OutputTableLabel + The name of the table to produce + Name of the parameter: OutputTableLabel. The name of the table to produce + + + + maxent_ + + +2016-04-07 15:29:12 DEBUG SClient4WPS:303 - WPSClient->Input: + SpeciesName + The name of the species to model and the occurrence records refer to + Name of the parameter: SpeciesName. The name of the species to model and the occurrence records refer to + + + + generic_species + + +2016-04-07 15:29:12 DEBUG SClient4WPS:303 - WPSClient->Input: + MaxIterations + The number of learning iterations of the MaxEnt algorithm + Name of the parameter: MaxIterations. 
The number of learning iterations of the MaxEnt algorithm + + + + 1000 + + +2016-04-07 15:29:12 DEBUG SClient4WPS:303 - WPSClient->Input: + DefaultPrevalence + A priori probability of presence at ordinary occurrence points + Name of the parameter: DefaultPrevalence. A priori probability of presence at ordinary occurrence points + + + + 0.5 + + +2016-04-07 15:29:12 DEBUG SClient4WPS:303 - WPSClient->Input: + OccurrencesTable + A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + Name of the parameter: OccurrencesTable. A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 15:29:12 DEBUG SClient4WPS:303 - WPSClient->Input: + LongitudeColumn + The column containing longitude values [the name of a column from OccurrencesTable] + Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrencesTable] + + + + decimallongitude + + +2016-04-07 15:29:12 DEBUG SClient4WPS:303 - WPSClient->Input: + LatitudeColumn + The column containing latitude values [the name of a column from OccurrencesTable] + Name of the parameter: LatitudeColumn. The column containing latitude values [the name of a column from OccurrencesTable] + + + + decimallatitude + + +2016-04-07 15:29:12 DEBUG SClient4WPS:303 - WPSClient->Input: + XResolution + Model projection resolution on the X axis in decimal degrees + Name of the parameter: XResolution. 
Model projection resolution on the X axis in decimal degrees + + + + 1 + + +2016-04-07 15:29:12 DEBUG SClient4WPS:303 - WPSClient->Input: + YResolution + Model projection resolution on the Y axis in decimal degrees + Name of the parameter: YResolution. Model projection resolution on the Y axis in decimal degrees + + + + 1 + + +2016-04-07 15:29:12 DEBUG SClient4WPS:303 - WPSClient->Input: + Layers + The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + + + + + +2016-04-07 15:29:12 DEBUG SClient4WPS:303 - WPSClient->Input: + Z + Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + Name of the parameter: Z. Value of Z. 
Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + + + + 0 + + +2016-04-07 15:29:12 DEBUG SClient4WPS:303 - WPSClient->Input: + TimeIndex + Time Index. The default is the first time indexed in the input environmental datasets + Name of the parameter: TimeIndex. Time Index. The default is the first time indexed in the input environmental datasets + + + + 0 + + +2016-04-07 15:29:12 DEBUG SClient4WPS:308 - WPSClient->Fetching Outputs +2016-04-07 15:29:12 DEBUG SClient4WPS:310 - WPSClient->Output: + Best Threshold + Best threshold for transforming MaxEnt values into 0/1 probability assignments + Name of the parameter: Best Threshold. Best threshold for transforming MaxEnt values into 0/1 probability assignments + + + + +2016-04-07 15:29:12 DEBUG SClient4WPS:310 - WPSClient->Output: + Estimated Prevalence + The a posteriori estimated prevalence of the species + Name of the parameter: Estimated Prevalence. The a posteriori estimated prevalence of the species + + + + +2016-04-07 15:29:12 DEBUG SClient4WPS:310 - WPSClient->Output: + Variables contributions + The contribution of each variable to the MaxEnt values estimates + Name of the parameter: Variables contributions. The contribution of each variable to the MaxEnt values estimates + + + + +2016-04-07 15:29:12 DEBUG SClient4WPS:310 - WPSClient->Output: + Variables Permutations Importance + The importance of the permutations of the variables during the training + Name of the parameter: Variables Permutations Importance. The importance of the permutations of the variables during the training + + + + +2016-04-07 15:29:12 DEBUG SClient4WPS:310 - WPSClient->Output: + ASCII Maps of the environmental layers for checking features aligments + ASCII Maps of the environmental layers for checking features aligments + Name of the parameter: ASCII Maps of the environmental layers for checking features aligments. 
ASCII Maps of the environmental layers for checking features aligments + + + + application/d4science + + + + + application/d4science + + + + +2016-04-07 15:29:12 DEBUG SClient4WPS:310 - WPSClient->Output: + OutputTable7 + Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OutputTable7. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 15:29:12 DEBUG SClient4WPS:310 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 15:29:12 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:29:12 DEBUG WPS2SM:254 - Conversion to SM Type->OutputTableLabel is a Literal Input +2016-04-07 15:29:12 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:29:12 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:29:12 DEBUG WPS2SM:101 - Guessed default value: maxent_ +2016-04-07 15:29:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The name of the table to produce +2016-04-07 15:29:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OutputTableLabel +2016-04-07 15:29:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:12 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=maxent_, value=null, name=OutputTableLabel, description=The name of the table to produce [Min N. of Entries:1; Max N. 
of Entries:1; default:maxent_], typology=OBJECT] +2016-04-07 15:29:12 DEBUG WPS2SM:254 - Conversion to SM Type->SpeciesName is a Literal Input +2016-04-07 15:29:12 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:29:12 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:29:12 DEBUG WPS2SM:101 - Guessed default value: generic_species +2016-04-07 15:29:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The name of the species to model and the occurrence records refer to +2016-04-07 15:29:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:SpeciesName +2016-04-07 15:29:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:12 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=generic_species, value=null, name=SpeciesName, description=The name of the species to model and the occurrence records refer to [Min N. of Entries:1; Max N. of Entries:1; default:generic_species], typology=OBJECT] +2016-04-07 15:29:12 DEBUG WPS2SM:254 - Conversion to SM Type->MaxIterations is a Literal Input +2016-04-07 15:29:12 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:29:12 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 15:29:12 DEBUG WPS2SM:101 - Guessed default value: 1000 +2016-04-07 15:29:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The number of learning iterations of the MaxEnt algorithm +2016-04-07 15:29:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:MaxIterations +2016-04-07 15:29:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:12 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1000, value=null, name=MaxIterations, description=The number of learning iterations of the MaxEnt algorithm [Min N. of Entries:1; Max N. 
of Entries:1; default:1000], typology=OBJECT] +2016-04-07 15:29:12 DEBUG WPS2SM:254 - Conversion to SM Type->DefaultPrevalence is a Literal Input +2016-04-07 15:29:12 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:29:12 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-07 15:29:12 DEBUG WPS2SM:101 - Guessed default value: 0.5 +2016-04-07 15:29:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:A priori probability of presence at ordinary occurrence points +2016-04-07 15:29:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:DefaultPrevalence +2016-04-07 15:29:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:12 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=0.5, value=null, name=DefaultPrevalence, description=A priori probability of presence at ordinary occurrence points [Min N. of Entries:1; Max N. of Entries:1; default:0.5], typology=OBJECT] +2016-04-07 15:29:12 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencesTable is a Complex Input +2016-04-07 15:29:12 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 15:29:12 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 15:29:12 DEBUG WPS2SM:201 - Schema: null +2016-04-07 15:29:12 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 15:29:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] +2016-04-07 15:29:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencesTable +2016-04-07 15:29:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:12 DEBUG SClient4WPS:658 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencesTable, description=A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets 
[a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 15:29:12 DEBUG WPS2SM:254 - Conversion to SM Type->LongitudeColumn is a Literal Input +2016-04-07 15:29:12 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:29:12 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:29:12 DEBUG WPS2SM:101 - Guessed default value: decimallongitude +2016-04-07 15:29:12 DEBUG WPS2SM:130 - Machter title: The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallongitude] +2016-04-07 15:29:12 DEBUG WPS2SM:131 - Machter find: true +2016-04-07 15:29:12 DEBUG WPS2SM:132 - Machter group: the name of a column from OccurrencesTable +2016-04-07 15:29:12 DEBUG WPS2SM:133 - Machter start: 40 +2016-04-07 15:29:12 DEBUG WPS2SM:134 - Machter end: 82 +2016-04-07 15:29:12 DEBUG WPS2SM:135 - Machter Group Count: 1 +2016-04-07 15:29:12 DEBUG WPS2SM:137 - Matcher referredTabularParameterName: OccurrencesTable +2016-04-07 15:29:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The column containing longitude values [the name of a column from OccurrencesTable] +2016-04-07 15:29:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:LongitudeColumn +2016-04-07 15:29:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:12 DEBUG SClient4WPS:658 - InputParameter: Parameter [name=LongitudeColumn, description=The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallongitude], typology=COLUMN] +2016-04-07 15:29:12 DEBUG WPS2SM:254 - Conversion to SM Type->LatitudeColumn is a Literal Input +2016-04-07 15:29:12 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:29:12 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:29:12 DEBUG WPS2SM:101 - Guessed default value: decimallatitude +2016-04-07 15:29:12 DEBUG WPS2SM:130 - Machter title: The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallatitude] +2016-04-07 15:29:12 DEBUG WPS2SM:131 - Machter find: true +2016-04-07 15:29:12 DEBUG WPS2SM:132 - Machter group: the name of a column from OccurrencesTable +2016-04-07 15:29:12 DEBUG WPS2SM:133 - Machter start: 39 +2016-04-07 15:29:12 DEBUG WPS2SM:134 - Machter end: 81 +2016-04-07 15:29:12 DEBUG WPS2SM:135 - Machter Group Count: 1 +2016-04-07 15:29:12 DEBUG WPS2SM:137 - Matcher referredTabularParameterName: OccurrencesTable +2016-04-07 15:29:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The column containing latitude values [the name of a column from OccurrencesTable] +2016-04-07 15:29:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:LatitudeColumn +2016-04-07 15:29:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:12 DEBUG SClient4WPS:658 - InputParameter: Parameter [name=LatitudeColumn, description=The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallatitude], typology=COLUMN] +2016-04-07 15:29:12 DEBUG WPS2SM:254 - Conversion to SM Type->XResolution is a Literal Input +2016-04-07 15:29:12 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:29:12 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-07 15:29:12 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 15:29:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Model projection resolution on the X axis in decimal degrees +2016-04-07 15:29:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:XResolution +2016-04-07 15:29:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:12 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=XResolution, description=Model projection resolution on the X axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 15:29:12 DEBUG WPS2SM:254 - Conversion to SM Type->YResolution is a Literal Input +2016-04-07 15:29:12 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:29:12 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-07 15:29:12 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 15:29:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Model projection resolution on the Y axis in decimal degrees +2016-04-07 15:29:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:YResolution +2016-04-07 15:29:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:12 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=YResolution, description=Model projection resolution on the Y axis in decimal degrees [Min N. of Entries:1; Max N. 
of Entries:1; default:1], typology=OBJECT] +2016-04-07 15:29:12 DEBUG WPS2SM:254 - Conversion to SM Type->Layers is a Literal Input +2016-04-07 15:29:12 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:29:12 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 15:29:12 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 15:29:12 DEBUG WPS2SM:147 - Machter title: The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1] +2016-04-07 15:29:12 DEBUG WPS2SM:148 - Machter find: true +2016-04-07 15:29:12 DEBUG WPS2SM:149 - Machter group: a sequence of values separated by | +2016-04-07 15:29:12 DEBUG WPS2SM:150 - Machter start: 501 +2016-04-07 15:29:12 DEBUG WPS2SM:151 - Machter end: 536 +2016-04-07 15:29:12 DEBUG WPS2SM:152 - Machter Group Count: 1 +2016-04-07 15:29:12 DEBUG WPS2SM:155 - Matcher separator: | +2016-04-07 15:29:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. 
https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) +2016-04-07 15:29:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Layers +2016-04-07 15:29:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:12 DEBUG SClient4WPS:658 - InputParameter: ListParameter [type=java.lang.String, value=null, separator=|, name=Layers, description=The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1], typology=LIST] +2016-04-07 15:29:12 DEBUG WPS2SM:254 - Conversion to SM Type->Z is a Literal Input +2016-04-07 15:29:12 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:29:12 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-07 15:29:12 DEBUG WPS2SM:101 - Guessed default value: 0 +2016-04-07 15:29:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer +2016-04-07 15:29:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Z +2016-04-07 15:29:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:12 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=0, value=null, name=Z, description=Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer [Min N. 
of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT] +2016-04-07 15:29:12 DEBUG WPS2SM:254 - Conversion to SM Type->TimeIndex is a Literal Input +2016-04-07 15:29:12 DEBUG WPS2SM:93 - WPS type: +2016-04-07 15:29:12 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 15:29:12 DEBUG WPS2SM:101 - Guessed default value: 0 +2016-04-07 15:29:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Time Index. The default is the first time indexed in the input environmental datasets +2016-04-07 15:29:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:TimeIndex +2016-04-07 15:29:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 15:29:12 DEBUG SClient4WPS:658 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex, description=Time Index. The default is the first time indexed in the input environmental datasets [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT] +2016-04-07 15:29:12 DEBUG SClient4WPS:662 - Parameters: [ObjectParameter [type=java.lang.String, defaultValue=maxent_, value=null, name=OutputTableLabel, description=The name of the table to produce [Min N. of Entries:1; Max N. of Entries:1; default:maxent_], typology=OBJECT], ObjectParameter [type=java.lang.String, defaultValue=generic_species, value=null, name=SpeciesName, description=The name of the species to model and the occurrence records refer to [Min N. of Entries:1; Max N. of Entries:1; default:generic_species], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1000, value=null, name=MaxIterations, description=The number of learning iterations of the MaxEnt algorithm [Min N. of Entries:1; Max N. of Entries:1; default:1000], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=0.5, value=null, name=DefaultPrevalence, description=A priori probability of presence at ordinary occurrence points [Min N. of Entries:1; Max N. 
of Entries:1; default:0.5], typology=OBJECT], TabularParameter [tableName= , templates=[], name=OccurrencesTable, description=A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=LongitudeColumn, description=The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallongitude], typology=COLUMN], Parameter [name=LatitudeColumn, description=The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallatitude], typology=COLUMN], ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=XResolution, description=Model projection resolution on the X axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=YResolution, description=Model projection resolution on the Y axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ListParameter [type=java.lang.String, value=null, separator=|, name=Layers, description=The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. 
of Entries:1], typology=LIST], ObjectParameter [type=java.lang.Double, defaultValue=0, value=null, name=Z, description=Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex, description=Time Index. The default is the first time indexed in the input environmental datasets [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT]] +2016-04-07 15:29:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:29:12 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:29:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:29:12 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:29:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:29:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:29:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:29:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:29:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:29:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:29:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:29:12 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 15:29:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:29:12 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:29:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:29:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:29:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:29:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:29:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:29:12 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 15:29:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:29:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 15:29:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:29:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:29:12 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:29:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:29:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 15:29:12 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 15:29:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 15:29:12 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:29:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:29:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:29:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:29:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:29:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:29:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 15:29:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:29:12 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 15:29:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:29:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 15:29:12 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 15:29:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 15:29:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 15:29:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 15:29:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 15:29:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 15:29:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:29:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:29:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:29:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:29:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 15:29:12 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 15:29:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:29:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:29:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 15:29:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 15:29:12 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 15:29:12 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 15:29:12 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 15:29:12 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 15:29:12 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 15:29:12 DEBUG ItemBuilder:108 - Is shared 
folder: SAI_ImportBigProject +2016-04-07 15:29:12 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 15:29:12 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 15:29:12 INFO WorkspaceExplorerServiceImpl:142 - end time - 184 msc 0 sec +2016-04-07 15:29:12 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 15:29:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 15:29:15 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 15:30:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 15:30:11 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 15:31:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:31:05 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:32:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:32:00 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:35:30 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 15:35:30 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 15:35:30 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 15:35:30 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 15:35:30 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 15:35:30 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 15:35:30 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 15:35:30 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@7bad6ffd +2016-04-07 15:35:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 15:35:30 INFO ASLSession:352 - Logging the entrance +2016-04-07 15:35:30 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 15:35:30 DEBUG TemplateModel:83 - 2016-04-07 15:35:30, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 15:35:30 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 15:35:30 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 15:35:34 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 15:35:34 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 15:35:34 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 15:35:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 15:35:34 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 15:35:34 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 15:35:34 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 15:35:34 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 131 ms +2016-04-07 15:35:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 15:35:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 15:35:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 15:35:34 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 15:35:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 15:35:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 15:35:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 15:35:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 15:35:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 15:35:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 15:35:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 15:35:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 15:35:34 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 15:35:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 15:35:34 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 15:35:34 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 15:35:34 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 15:35:34 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@43c15d5a +2016-04-07 15:35:34 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@499b12e2 +2016-04-07 15:35:34 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@44efc1d1 +2016-04-07 15:35:34 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@7c12df16 +2016-04-07 15:35:34 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 93 ms +2016-04-07 15:35:35 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 15:35:35 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 15:35:35 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 15:35:35 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 15:35:35 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 15:35:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 15:35:35 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 15:35:35 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 15:35:35 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 25 ms +2016-04-07 15:35:35 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 15:35:35 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:35:35 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 15:35:35 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 15:35:36 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:35:36 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 15:36:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 15:36:25 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 15:37:41 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 15:37:41 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 15:37:41 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 15:37:41 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 15:37:41 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 15:37:41 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 15:37:41 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 15:37:41 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@38688ffa +2016-04-07 15:37:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 15:37:41 INFO ASLSession:352 - Logging the entrance +2016-04-07 15:37:41 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 15:37:41 DEBUG TemplateModel:83 - 2016-04-07 15:37:41, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 15:37:41 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 15:37:41 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 15:37:45 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 15:37:45 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 15:37:45 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 15:37:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 15:37:45 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 15:37:45 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 15:37:45 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 15:37:45 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 145 ms +2016-04-07 15:37:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 15:37:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 15:37:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 15:37:45 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 15:37:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 15:37:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 15:37:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 15:37:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 15:37:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 15:37:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 15:37:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 15:37:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 15:37:45 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 15:37:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 15:37:45 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 15:37:45 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 15:37:45 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 15:37:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@c3f8b42 +2016-04-07 15:37:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@74115c0c +2016-04-07 15:37:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@56ffaf9 +2016-04-07 15:37:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@5f6ef762 +2016-04-07 15:37:45 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 112 ms +2016-04-07 15:37:45 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 15:37:45 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 15:37:45 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 15:37:46 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 15:37:46 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 15:37:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 15:37:46 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 15:37:46 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 15:37:46 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 26 ms +2016-04-07 15:37:46 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 15:37:46 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:37:46 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 15:37:46 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 15:37:47 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:37:47 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 15:38:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:38:36 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 15:38:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:38:58 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:38:58 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 15:38:58 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 15:39:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:39:01 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 15:39:01 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 15:39:01 INFO 
StatWPSClientSession:84 - CONNECT +2016-04-07 15:39:02 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 15:39:02 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). 
A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. 
Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. 
The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 15:39:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:39:53 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:40:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:40:48 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 15:41:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:41:43 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 15:42:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 15:42:38 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 15:43:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 15:43:33 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 15:44:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 
+2016-04-07 15:44:28 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 15:45:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 15:45:23 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 15:46:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:46:18 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:47:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:47:13 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 15:48:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 15:48:08 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 15:49:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:49:03 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:49:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 15:49:58 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 15:50:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 15:50:53 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 15:51:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 15:51:48 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 15:52:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 15:52:43 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 15:53:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:53:38 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:54:33 DEBUG 
DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 15:54:33 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 15:55:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 15:55:28 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 15:56:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 15:56:23 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 15:57:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:57:18 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:58:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 15:58:13 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 15:59:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 15:59:08 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 16:00:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:00:03 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:00:57 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 16:00:57 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 16:00:57 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 16:00:57 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 16:00:57 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 16:00:57 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:00:57 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 16:00:57 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@7a5dacf1 +2016-04-07 16:00:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:00:57 INFO ASLSession:352 - Logging the entrance +2016-04-07 16:00:57 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 16:00:57 DEBUG TemplateModel:83 - 2016-04-07 16:00:57, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 16:00:57 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 16:00:57 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 16:01:03 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 16:01:03 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 16:01:03 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 16:01:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 16:01:03 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 16:01:03 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 16:01:03 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 16:01:03 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 122 ms +2016-04-07 16:01:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 16:01:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 16:01:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 16:01:03 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 16:01:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 16:01:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 16:01:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 16:01:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 16:01:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 16:01:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 16:01:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 16:01:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 16:01:03 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 16:01:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 16:01:03 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 16:01:03 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 16:01:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 16:01:03 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@169e3d1e +2016-04-07 16:01:03 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@74e8f258 +2016-04-07 16:01:03 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@6380a42a +2016-04-07 16:01:03 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@51a2933 +2016-04-07 16:01:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 106 ms +2016-04-07 16:01:03 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 16:01:03 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 16:01:03 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 16:01:03 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 16:01:03 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 16:01:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 16:01:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 16:01:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 16:01:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 24 ms +2016-04-07 16:01:03 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 16:01:03 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:01:03 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 16:01:03 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 16:01:05 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:01:05 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 16:01:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 16:01:52 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 16:02:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 16:02:47 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 16:03:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 16:03:42 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 16:04:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 16:04:37 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 16:05:09 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 16:05:09 DEBUG AccessLogger:44 - Constructing a new access logger. 
Create a new file if it does not exist for the current date +2016-04-07 16:05:09 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 16:05:09 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 16:05:09 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 16:05:09 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 16:05:09 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 16:05:09 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@4de444b5 +2016-04-07 16:05:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 16:05:09 INFO ASLSession:352 - Logging the entrance +2016-04-07 16:05:09 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 16:05:09 DEBUG TemplateModel:83 - 2016-04-07 16:05:09, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 16:05:09 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 16:05:09 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 16:05:12 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 16:05:12 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 16:05:12 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 16:05:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 16:05:12 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 16:05:12 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 16:05:13 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 16:05:13 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 129 ms +2016-04-07 16:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 16:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 16:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 16:05:13 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 16:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 16:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 16:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 16:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 16:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 16:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 16:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 16:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 16:05:13 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 16:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 16:05:13 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 16:05:13 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 16:05:13 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 16:05:13 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@624155e +2016-04-07 16:05:13 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@688b6583 +2016-04-07 16:05:13 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@6e2722e4 +2016-04-07 16:05:13 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@32028677 +2016-04-07 16:05:13 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 101 ms +2016-04-07 16:05:13 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 16:05:13 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 16:05:13 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 16:05:13 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 16:05:13 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 16:05:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 16:05:13 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 16:05:13 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 16:05:13 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-07 16:05:13 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 16:05:13 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:05:13 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 16:05:13 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 16:05:14 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:05:14 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 16:06:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 16:06:04 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 16:07:28 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 16:07:28 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 16:07:28 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 16:07:28 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 16:07:28 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 16:07:28 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 16:07:28 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 16:07:28 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@2197a31d +2016-04-07 16:07:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 16:07:28 INFO ASLSession:352 - Logging the entrance +2016-04-07 16:07:28 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 16:07:28 DEBUG TemplateModel:83 - 2016-04-07 16:07:28, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 16:07:28 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 16:07:28 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 16:07:32 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 16:07:32 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 16:07:32 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 16:07:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 16:07:32 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 16:07:32 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 16:07:32 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 16:07:32 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 149 ms +2016-04-07 16:07:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 16:07:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 16:07:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 16:07:33 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 16:07:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 16:07:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 16:07:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 16:07:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 16:07:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 16:07:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 16:07:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 16:07:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 16:07:33 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 16:07:33 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 16:07:33 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 16:07:33 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 16:07:33 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 16:07:33 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@54968d7f +2016-04-07 16:07:33 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@35c1fc4a +2016-04-07 16:07:33 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@6af14528 +2016-04-07 16:07:33 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@5ebe7dc2 +2016-04-07 16:07:33 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 100 ms +2016-04-07 16:07:33 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 16:07:33 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 16:07:33 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 16:07:33 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 16:07:33 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 16:07:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 16:07:33 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 16:07:33 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 16:07:33 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 32 ms +2016-04-07 16:07:33 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 16:07:33 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:07:33 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 16:07:33 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 16:07:34 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:07:34 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 16:08:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 16:08:23 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 16:09:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 16:09:18 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 16:11:07 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 16:11:07 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 16:11:07 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 16:11:07 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 16:11:07 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 16:11:07 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:11:07 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 16:11:07 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@295e4dad +2016-04-07 16:11:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:11:07 INFO ASLSession:352 - Logging the entrance +2016-04-07 16:11:07 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 16:11:07 DEBUG TemplateModel:83 - 2016-04-07 16:11:07, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 16:11:07 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 16:11:07 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 16:11:10 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 16:11:10 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 16:11:10 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 16:11:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 16:11:10 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 16:11:10 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 16:11:10 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 16:11:10 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 115 ms +2016-04-07 16:11:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 16:11:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 16:11:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 16:11:10 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 16:11:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 16:11:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 16:11:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 16:11:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 16:11:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 16:11:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 16:11:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 16:11:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 16:11:10 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 16:11:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 16:11:10 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 16:11:10 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 16:11:11 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 16:11:11 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@17bc987a +2016-04-07 16:11:11 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@39e76774 +2016-04-07 16:11:11 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@18580e53 +2016-04-07 16:11:11 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@6441caeb +2016-04-07 16:11:11 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 101 ms +2016-04-07 16:11:11 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 16:11:11 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 16:11:11 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 16:11:11 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 16:11:11 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 16:11:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 16:11:11 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 16:11:11 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 16:11:11 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 26 ms +2016-04-07 16:11:11 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 16:11:11 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:11:11 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 16:11:11 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 16:11:12 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:11:12 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 16:12:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 16:12:02 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 16:12:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:12:57 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:14:11 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 16:14:11 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 16:14:11 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 16:14:11 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 16:14:11 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 16:14:11 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 16:14:11 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 16:14:11 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@5d6929e1 +2016-04-07 16:14:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 16:14:11 INFO ASLSession:352 - Logging the entrance +2016-04-07 16:14:11 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 16:14:11 DEBUG TemplateModel:83 - 2016-04-07 16:14:11, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 16:14:11 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 16:14:11 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 16:14:16 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 16:14:16 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 16:14:16 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 16:14:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:14:16 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:14:16 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 16:14:16 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 16:14:16 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 127 ms +2016-04-07 16:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 16:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 16:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 16:14:16 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 16:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 16:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 16:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 16:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 16:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 16:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 16:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 16:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 16:14:16 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 16:14:16 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 16:14:16 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 16:14:16 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 16:14:17 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 16:14:17 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@4e59016 +2016-04-07 16:14:17 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@67311e4b +2016-04-07 16:14:17 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@cef97d7 +2016-04-07 16:14:17 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@29342a8d +2016-04-07 16:14:17 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 113 ms +2016-04-07 16:14:17 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 16:14:17 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 16:14:17 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 16:14:17 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 16:14:17 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 16:14:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:14:17 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 16:14:17 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 16:14:17 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 19 ms +2016-04-07 16:14:17 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 16:14:17 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:14:17 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 16:14:17 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 16:14:18 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:14:18 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 16:14:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 16:14:24 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 16:14:24 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 16:14:24 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-07 16:14:24 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:14:24 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 16:14:25 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. 
a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 16:14:25 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 16:14:25 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 16:14:25 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 16:14:25 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 16:14:25 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-07 16:14:25 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-07 16:14:25 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-07 16:14:25 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-07 16:14:25 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 16:14:25 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 16:14:25 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 16:14:25 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:14:25 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 16:14:25 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 16:14:25 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 16:14:25 DEBUG WPS2SM:201 - Schema: null +2016-04-07 16:14:25 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 16:14:25 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 16:14:25 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 16:14:25 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:14:25 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 16:14:25 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 16:14:25 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:14:25 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 16:14:25 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 16:14:25 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-07 16:14:25 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 16:14:25 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 16:14:25 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 16:14:25 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 16:14:25 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 16:14:25 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 16:14:25 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 16:14:25 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 16:14:25 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 16:14:25 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:14:25 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 16:14:25 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 16:14:25 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:14:25 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 16:14:25 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 16:14:25 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 16:14:25 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 16:14:25 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:14:25 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 16:14:25 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-07 16:14:25 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:14:25 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 16:14:25 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-07 16:14:25 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-07 16:14:25 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-07 16:14:25 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:14:25 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-07 16:14:25 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-07 16:14:25 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:14:25 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 16:14:25 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 16:14:25 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-07 16:14:25 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-07 16:14:25 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:14:25 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-07 16:14:25 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-07 16:14:25 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:14:25 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 16:14:25 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-07 16:14:25 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-07 16:14:25 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-07 16:14:25 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:14:25 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-07 16:14:25 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 16:14:25 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:14:25 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 16:14:25 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-07 16:14:25 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-07 16:14:25 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 16:14:25 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:14:25 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-07 16:14:25 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-07 16:14:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 16:14:25 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 16:14:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 16:14:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 16:14:25 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 16:14:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 16:14:25 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 16:14:25 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 16:14:25 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 16:14:25 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 16:14:25 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 16:14:25 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 16:14:25 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 16:14:25 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 16:14:25 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 16:14:25 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 16:14:25 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 16:14:25 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 16:14:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 16:14:25 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 16:14:25 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 16:14:25 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-07 16:14:25 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 16:14:25 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 31 +2016-04-07 16:14:25 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 16:14:25 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 16:14:25 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 16:14:25 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 16:14:25 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 16:14:25 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 16:14:25 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 16:14:25 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 16:14:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:14:25 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:14:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:14:25 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 16:14:25 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 16:14:25 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 16:14:25 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 16:14:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 16:14:25 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 16:14:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 16:14:25 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 16:14:25 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 16:14:25 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 16:14:25 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 16:14:26 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 16:14:26 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 16:14:26 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 16:14:26 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 16:14:26 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 16:14:26 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 16:14:26 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 16:14:26 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 16:14:26 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 16:14:26 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 16:14:26 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 16:14:26 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 16:14:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:14:26 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 16:14:26 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 16:14:26 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 16:14:26 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 16:14:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:14:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:14:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-07 16:14:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:14:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 16:14:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:14:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:14:26 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:14:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 16:14:26 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 16:14:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:14:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:14:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:14:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:14:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:14:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:14:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:14:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:14:26 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 16:14:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 16:14:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 
16:14:26 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 16:14:26 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 16:14:26 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 31 ms +2016-04-07 16:14:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 16:14:26 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 16:14:26 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-07 16:14:26 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-07 16:14:26 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-07 16:14:26 INFO ISClientConnector:82 - found only one RR, take it +2016-04-07 16:14:26 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-07 16:14:26 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-07 16:14:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 16:14:26 DEBUG StorageClient:517 - set scope: /gcube +2016-04-07 16:14:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 16:14:26 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 16:14:26 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 16:14:26 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-07 16:14:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 16:14:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 16:14:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 16:14:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 16:14:26 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 16:14:26 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 16:14:26 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 16:14:26 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-07 16:14:26 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 16:14:26 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 16:14:26 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 16:14:26 INFO WorkspaceExplorerServiceImpl:142 - end time - 459 msc 0 sec +2016-04-07 16:14:26 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 16:15:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 16:15:06 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 16:15:45 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 16:15:45 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 16:15:45 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 16:15:45 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 16:15:45 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 16:15:45 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 16:15:45 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 16:15:45 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@366804cb +2016-04-07 16:15:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 16:15:45 INFO ASLSession:352 - Logging the entrance +2016-04-07 16:15:45 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 16:15:45 DEBUG TemplateModel:83 - 2016-04-07 16:15:45, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 16:15:45 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 16:15:45 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 16:15:48 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 16:15:48 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 16:15:48 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 16:15:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 16:15:48 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 16:15:48 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 16:15:48 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 16:15:48 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 125 ms +2016-04-07 
16:15:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 16:15:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 16:15:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 16:15:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 16:15:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 16:15:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 16:15:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 16:15:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 16:15:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 16:15:48 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 16:15:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 16:15:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 16:15:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 16:15:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 16:15:48 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 16:15:49 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 16:15:49 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-07 16:15:49 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@45057093 +2016-04-07 16:15:49 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@12f84f37 +2016-04-07 16:15:49 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@f781846 +2016-04-07 16:15:49 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@31d74855 +2016-04-07 16:15:49 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 116 ms +2016-04-07 16:15:49 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 16:15:49 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 16:15:49 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 16:15:49 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 16:15:49 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 16:15:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 16:15:49 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 16:15:49 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 16:15:49 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 30 ms +2016-04-07 16:15:49 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 16:15:49 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:15:49 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 16:15:49 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 16:15:50 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:15:50 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 16:15:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 16:15:53 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 16:15:53 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 16:15:53 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-07 16:15:53 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:15:53 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 16:15:54 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 16:15:54 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 16:15:54 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 16:15:54 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 16:15:54 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 16:15:54 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-07 16:15:54 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-07 16:15:54 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 16:15:54 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 16:15:54 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 16:15:54 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:15:54 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 16:15:54 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 16:15:54 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 16:15:54 DEBUG WPS2SM:201 - Schema: null +2016-04-07 16:15:54 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 16:15:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 16:15:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 16:15:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:15:54 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 16:15:54 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 16:15:54 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:15:54 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 16:15:54 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 16:15:54 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-07 16:15:54 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 16:15:54 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 16:15:54 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 16:15:54 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 16:15:54 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 16:15:54 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 16:15:54 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 16:15:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 16:15:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 16:15:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:15:54 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 16:15:54 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 16:15:54 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:15:54 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 16:15:54 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 16:15:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 16:15:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 16:15:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:15:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 16:15:54 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-07 16:15:54 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:15:54 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 16:15:54 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 16:15:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-07 16:15:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-07 16:15:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:15:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-07 16:15:54 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 16:15:54 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:15:54 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 16:15:54 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 16:15:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-07 16:15:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 16:15:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:15:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 16:15:54 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-07 16:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 16:15:54 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 16:15:54 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 16:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:15:54 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 16:15:54 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 16:15:54 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 16:15:54 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 16:15:54 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 16:15:54 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 16:15:54 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 16:15:54 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 16:15:54 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 16:15:54 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 16:15:54 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 16:15:54 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 16:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-07 16:15:54 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 16:15:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 16:15:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 17 ms +2016-04-07 16:15:54 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 16:15:54 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:15:54 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 16:15:54 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 16:15:54 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 16:15:54 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 16:15:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 16:15:54 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 16:15:54 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 16:15:54 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 16:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 16:15:54 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 16:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 16:15:54 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 16:15:54 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 16:15:54 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 16:15:54 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 16:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 16:15:54 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 16:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 16:15:54 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 16:15:54 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 16:15:54 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 16:15:54 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 16:15:54 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 16:15:55 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 16:15:55 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 16:15:55 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 16:15:55 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 16:15:55 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 16:15:55 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 16:15:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 16:15:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 16:15:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 16:15:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 16:15:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 16:15:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 16:15:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 16:15:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:15:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 16:15:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:15:55 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 16:15:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:15:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-07 16:15:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 16:15:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:15:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:15:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:15:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:15:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 16:15:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 16:15:55 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 16:15:55 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 16:15:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:15:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:15:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:15:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:15:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:15:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:15:55 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 16:15:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 16:15:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 
16:15:55 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 16:15:55 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 16:15:55 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 34 ms +2016-04-07 16:15:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 16:15:55 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 16:15:55 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-07 16:15:55 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-07 16:15:55 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-07 16:15:55 INFO ISClientConnector:82 - found only one RR, take it +2016-04-07 16:15:55 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-07 16:15:55 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-07 16:15:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 16:15:55 DEBUG StorageClient:517 - set scope: /gcube +2016-04-07 16:15:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 16:15:55 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 16:15:55 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 16:15:55 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-07 16:15:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 16:15:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 16:15:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 16:15:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 16:15:55 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 16:15:55 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 16:15:55 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 16:15:55 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-07 16:15:55 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 16:15:55 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 16:15:55 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 16:15:55 INFO WorkspaceExplorerServiceImpl:142 - end time - 417 msc 0 sec +2016-04-07 16:15:55 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 16:15:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 16:15:59 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 16:15:59 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 16:15:59 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-07 16:15:59 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:15:59 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 16:15:59 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 16:15:59 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 16:15:59 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 16:15:59 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 16:15:59 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 16:15:59 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-07 16:15:59 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-07 16:15:59 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-07 16:15:59 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-07 16:15:59 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 16:15:59 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 16:15:59 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 16:15:59 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:15:59 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 16:15:59 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 16:15:59 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 16:15:59 DEBUG WPS2SM:201 - Schema: null +2016-04-07 16:15:59 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 16:15:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 16:15:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 16:15:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:15:59 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 16:15:59 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 16:15:59 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:15:59 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 16:15:59 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 16:15:59 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-07 16:15:59 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 16:15:59 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 16:15:59 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 16:15:59 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 16:15:59 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 16:15:59 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 16:15:59 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 16:15:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 16:15:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 16:15:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:15:59 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 16:15:59 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 16:15:59 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:15:59 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 16:15:59 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 16:15:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 16:15:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 16:15:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:15:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 16:15:59 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-07 16:15:59 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:15:59 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 16:15:59 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-07 16:15:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-07 16:15:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-07 16:15:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:15:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-07 16:15:59 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-07 16:15:59 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:15:59 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 16:15:59 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 16:15:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-07 16:15:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-07 16:15:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:15:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-07 16:15:59 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-07 16:15:59 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:15:59 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 16:15:59 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-07 16:15:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-07 16:15:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-07 16:15:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:15:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-07 16:15:59 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 16:15:59 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:15:59 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 16:15:59 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-07 16:15:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-07 16:15:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 16:15:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:15:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-07 16:15:59 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-07 16:15:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 16:15:59 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 16:15:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 16:15:59 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 16:15:59 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 16:15:59 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 16:15:59 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 16:15:59 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 16:15:59 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 16:15:59 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:15:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 16:15:59 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 16:15:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 16:15:59 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 16:15:59 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 16:15:59 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 16:15:59 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 16:15:59 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 16:15:59 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 16:15:59 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 16:15:59 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 16:15:59 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:15:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 16:15:59 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 16:15:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 16:15:59 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 16:15:59 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 16:15:59 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 16:15:59 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 16:15:59 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 16:15:59 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 16:15:59 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:15:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 16:15:59 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 16:15:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 16:15:59 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 16:15:59 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 16:15:59 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 16:15:59 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 16:15:59 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 16:15:59 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 16:15:59 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:15:59 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:15:59 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:15:59 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:15:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 16:15:59 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 16:15:59 INFO JCRWorkspace:315 - Getting Workspace 
of user: giancarlo.panichi +2016-04-07 16:16:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:16:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:16:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:16:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 16:16:00 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 16:16:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:16:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:16:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:16:00 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 16:16:00 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 16:16:00 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 16:16:00 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 16:16:00 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 16:16:00 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 16:16:00 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 16:16:00 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 16:16:00 INFO WorkspaceExplorerServiceImpl:142 - end time - 222 msc 0 sec +2016-04-07 16:16:00 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 16:16:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 16:16:01 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 16:16:01 INFO 
SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 16:16:01 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-07 16:16:01 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:16:01 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 16:16:01 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. 
DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 16:16:01 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 16:16:01 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 16:16:01 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 16:16:01 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. 
table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 16:16:01 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + +2016-04-07 16:16:01 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-07 16:16:01 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 16:16:01 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 16:16:01 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 16:16:01 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:16:01 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 16:16:01 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 16:16:01 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 16:16:01 DEBUG WPS2SM:201 - Schema: null +2016-04-07 16:16:01 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 16:16:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 16:16:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 16:16:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:16:01 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 16:16:01 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 16:16:01 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:16:01 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 16:16:01 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 16:16:01 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-07 16:16:01 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 16:16:01 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 16:16:01 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 16:16:01 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 16:16:01 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 16:16:01 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 16:16:01 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 16:16:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 16:16:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 16:16:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:16:01 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 16:16:01 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 16:16:01 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:16:01 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 16:16:01 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 16:16:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 16:16:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 16:16:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:16:01 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 16:16:01 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-07 16:16:01 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:16:01 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 16:16:01 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 16:16:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-07 16:16:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-07 16:16:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:16:01 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-07 16:16:01 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 16:16:01 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:16:01 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 16:16:01 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 16:16:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-07 16:16:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 16:16:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:16:01 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 16:16:01 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-07 16:16:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 16:16:01 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 16:16:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 16:16:01 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 16:16:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 16:16:01 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 16:16:01 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 16:16:01 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 16:16:01 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 16:16:01 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:16:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 16:16:01 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 16:16:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 16:16:01 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 16:16:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 16:16:01 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 16:16:01 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 16:16:01 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 16:16:01 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 16:16:01 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 16:16:01 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:16:01 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 16:16:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 16:16:01 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 16:16:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 16:16:01 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 16:16:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 16:16:01 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 16:16:01 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 16:16:01 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 16:16:01 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 16:16:01 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:16:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:16:01 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:16:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:16:01 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 16:16:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 16:16:01 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 16:16:01 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 16:16:01 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 16:16:01 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 16:16:01 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:16:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:16:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:16:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:16:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 16:16:02 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 16:16:02 INFO JCRWorkspace:315 - Getting Workspace 
of user: giancarlo.panichi +2016-04-07 16:16:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:16:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:16:02 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:16:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:16:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:16:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:16:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:16:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:16:02 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 16:16:02 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 16:16:02 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 16:16:02 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 16:16:02 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 16:16:02 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 16:16:02 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 16:16:02 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 16:16:02 INFO WorkspaceExplorerServiceImpl:142 - end time - 214 msc 0 sec +2016-04-07 16:16:02 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 16:16:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 16:16:40 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 16:17:35 DEBUG 
DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 16:17:35 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 16:18:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 16:18:30 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 16:19:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 16:19:25 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 16:20:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:20:20 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:21:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 16:21:15 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 16:22:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 16:22:10 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 16:23:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 16:23:05 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 16:23:05 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 16:23:05 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-07 16:23:05 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:23:05 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 16:23:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:23:05 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:23:06 DEBUG SClient4WPS:284 - + 
org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. 
number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 16:23:06 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 16:23:06 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 16:23:06 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 16:23:06 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. 
table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 16:23:06 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-07 16:23:06 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + +2016-04-07 16:23:06 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-07 16:23:06 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-07 16:23:06 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 16:23:06 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 16:23:06 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 16:23:06 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:23:06 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 16:23:06 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 16:23:06 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 16:23:06 DEBUG WPS2SM:201 - Schema: null +2016-04-07 16:23:06 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 16:23:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 16:23:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 16:23:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:23:06 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-07 16:23:06 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 16:23:06 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:23:06 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 16:23:06 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 16:23:06 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-07 16:23:06 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 16:23:06 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 16:23:06 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 16:23:06 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 16:23:06 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 16:23:06 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 16:23:06 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 16:23:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 16:23:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 16:23:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:23:06 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 16:23:06 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 16:23:06 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:23:06 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 16:23:06 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 16:23:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 16:23:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 16:23:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:23:06 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 16:23:06 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-07 16:23:06 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:23:06 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 16:23:06 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-07 16:23:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-07 16:23:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-07 16:23:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:23:06 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-07 16:23:06 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-07 16:23:06 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:23:06 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 16:23:06 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 16:23:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-07 16:23:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-07 16:23:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:23:06 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-07 16:23:06 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-07 16:23:06 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:23:06 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 16:23:06 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-07 16:23:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-07 16:23:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-07 16:23:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:23:06 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-07 16:23:06 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 16:23:06 DEBUG WPS2SM:93 - WPS type: +2016-04-07 16:23:06 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 16:23:06 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-07 16:23:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-07 16:23:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 16:23:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 16:23:06 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-07 16:23:06 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-07 16:23:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 16:23:06 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 16:23:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 16:23:06 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 16:23:06 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 16:23:06 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 16:23:06 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 16:23:06 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 16:23:06 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 16:23:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:23:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 16:23:06 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 16:23:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 16:23:06 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 16:23:06 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 16:23:06 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 16:23:06 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 16:23:06 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 16:23:06 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 16:23:06 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 16:23:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:23:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 16:23:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 16:23:06 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 16:23:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 16:23:06 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 16:23:06 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 16:23:06 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 16:23:06 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 16:23:06 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 16:23:06 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 16:23:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:23:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:23:06 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:23:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:23:06 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 16:23:06 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 16:23:06 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 16:23:06 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 16:23:06 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 16:23:06 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 16:23:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:23:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 16:23:06 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 16:23:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:23:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:23:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:23:06 INFO JCRWorkspace:315 - Getting Workspace 
of user: giancarlo.panichi +2016-04-07 16:23:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:23:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:23:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:23:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:23:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:23:06 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:23:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 16:23:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 16:23:06 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 16:23:06 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 16:23:06 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 16:23:06 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 16:23:06 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 16:23:06 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 16:23:06 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 16:23:06 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 16:23:06 INFO WorkspaceExplorerServiceImpl:142 - end time - 196 msc 0 sec +2016-04-07 16:23:06 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 16:24:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:24:00 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:24:55 DEBUG 
DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 16:24:55 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 16:25:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:25:50 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:26:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 16:26:45 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 16:27:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:27:40 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:28:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:28:35 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:29:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 16:29:30 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 16:30:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 16:30:25 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 16:31:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 16:31:20 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 16:32:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 16:32:15 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 16:33:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:33:10 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:34:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 16:34:05 DEBUG ASLSession:458 - Getting 
security token: null in thread 29 +2016-04-07 16:35:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 16:35:00 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 16:35:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 16:35:55 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 16:36:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 16:36:50 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 16:37:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 16:37:45 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:38:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 16:38:40 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 16:39:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 16:39:35 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 16:40:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 16:40:30 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 16:41:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 16:41:25 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 16:42:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 16:42:20 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 16:43:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 16:43:15 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 16:44:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in 
thread 35 +2016-04-07 16:44:10 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 16:45:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 16:45:05 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 16:46:00 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 16:46:00 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:46:00 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 16:46:00 WARN SessionCheckerServiceImpl:80 - Scope is null at Thu Apr 07 16:46:00 CEST 2016 +2016-04-07 16:46:00 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 16:46:41 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 16:46:41 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 16:46:41 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 16:46:41 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 16:46:41 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:46:41 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 16:46:41 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi null +2016-04-07 16:46:41 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 16:46:41 DEBUG ASLSession:311 - Scope is null, returning null +2016-04-07 16:46:41 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=null, scopeName=null, userEmailAddress=null, userFullName=null] +2016-04-07 16:46:44 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 16:46:44 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:46:44 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 16:46:44 INFO SessionUtil:74 - SessionUtil: 
aslSession giancarlo.panichi null +2016-04-07 16:46:44 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 16:46:45 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:46:45 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. 
In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. 
A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to 
longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 16:47:36 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 16:47:36 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:47:36 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 16:47:36 WARN SessionCheckerServiceImpl:80 - Scope is null at Thu Apr 07 16:47:36 CEST 2016 +2016-04-07 16:47:36 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 16:47:54 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 16:47:54 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 16:47:54 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 16:47:54 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi null +2016-04-07 16:47:54 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 16:47:54 DEBUG ASLSession:311 - Scope is null, returning null +2016-04-07 16:47:54 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=null, 
scopeName=null, userEmailAddress=null, userFullName=null] +2016-04-07 16:47:56 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 16:47:56 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 16:47:56 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 16:47:56 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi null +2016-04-07 16:47:56 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 16:47:57 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 16:47:57 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. 
a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. 
If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. 
quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], 
hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 17:02:13 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 17:02:13 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 17:02:13 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 17:02:13 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 17:02:13 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 17:02:13 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 17:02:13 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 17:02:13 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@5cb02ec6 +2016-04-07 17:02:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 17:02:13 INFO ASLSession:352 - Logging the entrance +2016-04-07 17:02:13 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 17:02:13 DEBUG TemplateModel:83 - 2016-04-07 17:02:13, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 17:02:13 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 17:02:13 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 17:02:16 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 17:02:16 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 17:02:16 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 17:02:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 17:02:16 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 17:02:16 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 17:02:16 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 17:02:17 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 132 ms +2016-04-07 17:02:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 17:02:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 17:02:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 17:02:17 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 17:02:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 17:02:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 17:02:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 17:02:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 17:02:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 17:02:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 17:02:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 17:02:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 17:02:17 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 17:02:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 17:02:17 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 17:02:17 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 17:02:17 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 17:02:17 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@6765ecb1 +2016-04-07 17:02:17 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@35c1fc4a +2016-04-07 17:02:17 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@6af14528 +2016-04-07 17:02:17 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@5ebe7dc2 +2016-04-07 17:02:17 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 135 ms +2016-04-07 17:02:17 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 17:02:17 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 17:02:17 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 17:02:17 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 17:02:17 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 17:02:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 17:02:17 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 17:02:17 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 17:02:17 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 32 ms +2016-04-07 17:02:17 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 17:02:17 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 17:02:17 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 17:02:17 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 17:02:19 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 17:02:19 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 17:03:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 17:03:08 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 17:04:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 17:04:03 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 17:04:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 17:04:58 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 17:05:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 17:05:53 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 17:06:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 17:06:48 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 17:07:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 
+2016-04-07 17:07:43 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 17:08:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 17:08:38 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 17:09:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 17:09:33 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 17:10:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 17:10:28 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 17:11:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 17:11:23 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 17:12:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 17:12:18 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 17:13:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 17:13:13 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 17:14:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 17:14:08 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 17:15:23 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 17:15:23 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 17:15:23 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 17:15:23 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 17:15:23 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 17:15:23 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 17:15:23 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 17:15:23 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@1162f174 +2016-04-07 17:15:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 17:15:23 INFO ASLSession:352 - Logging the entrance +2016-04-07 17:15:23 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 17:15:23 DEBUG TemplateModel:83 - 2016-04-07 17:15:23, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 17:15:23 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 17:15:23 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 17:15:27 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 17:15:27 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 17:15:27 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 17:15:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 17:15:27 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 17:15:27 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 17:15:27 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 17:15:27 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 127 ms +2016-04-07 17:15:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 17:15:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 17:15:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 17:15:27 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 17:15:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 17:15:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 17:15:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 17:15:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 17:15:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 17:15:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 17:15:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 17:15:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 17:15:27 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 17:15:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 17:15:27 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 17:15:27 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 17:15:27 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 17:15:27 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@6f6d9255 +2016-04-07 17:15:27 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@63d8fdbe +2016-04-07 17:15:27 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@2946f76d +2016-04-07 17:15:27 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@371ac20d +2016-04-07 17:15:27 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 119 ms +2016-04-07 17:15:27 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 17:15:27 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 17:15:27 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 17:15:27 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 17:15:27 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 17:15:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 17:15:27 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 17:15:27 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 17:15:27 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 19 ms +2016-04-07 17:15:27 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 17:15:27 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 17:15:27 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 17:15:27 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 17:15:29 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 17:15:29 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 17:16:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 17:16:18 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 17:17:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 17:17:13 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 17:18:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 17:18:08 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 17:19:04 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 17:19:04 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 17:19:04 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 17:19:05 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 17:19:05 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 17:19:05 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 17:19:05 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 17:19:05 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@56662b1 +2016-04-07 17:19:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 17:19:05 INFO ASLSession:352 - Logging the entrance +2016-04-07 17:19:05 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 17:19:05 DEBUG TemplateModel:83 - 2016-04-07 17:19:05, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 17:19:05 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 17:19:05 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 17:19:08 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 17:19:08 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 17:19:08 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 17:19:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 17:19:08 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 17:19:08 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 17:19:08 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 17:19:08 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 113 ms +2016-04-07 17:19:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 17:19:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 17:19:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 17:19:08 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 17:19:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 17:19:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 17:19:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 17:19:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 17:19:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 17:19:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 17:19:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 17:19:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 17:19:08 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 17:19:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 17:19:08 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 17:19:08 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 17:19:08 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 17:19:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@ad9d5da +2016-04-07 17:19:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@13b5bee0 +2016-04-07 17:19:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@67ea92eb +2016-04-07 17:19:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@28871950 +2016-04-07 17:19:08 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 119 ms +2016-04-07 17:19:09 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 17:19:09 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 17:19:09 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 17:19:09 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 17:19:09 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 17:19:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 17:19:09 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 17:19:09 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 17:19:09 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-07 17:19:09 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 17:19:09 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 17:19:09 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 17:19:09 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 17:19:10 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 17:19:10 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 17:19:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 17:19:56 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 17:19:56 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 17:19:56 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-07 17:19:56 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 17:19:56 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 17:19:57 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 17:19:57 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 17:19:57 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 17:19:57 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 17:19:57 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 17:19:57 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-07 17:19:57 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-07 17:19:57 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 17:19:57 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 17:19:57 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 17:19:57 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 17:19:57 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 17:19:57 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 17:19:57 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 17:19:57 DEBUG WPS2SM:201 - Schema: null +2016-04-07 17:19:57 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 17:19:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 17:19:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 17:19:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 17:19:57 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 17:19:57 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 17:19:57 DEBUG WPS2SM:93 - WPS type: +2016-04-07 17:19:57 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 17:19:57 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 17:19:57 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-07 17:19:57 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 17:19:57 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 17:19:57 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 17:19:57 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 17:19:57 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 17:19:57 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 17:19:57 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 17:19:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 17:19:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 17:19:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 17:19:57 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 17:19:57 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 17:19:57 DEBUG WPS2SM:93 - WPS type: +2016-04-07 17:19:57 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 17:19:57 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 17:19:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 17:19:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 17:19:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 17:19:57 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 17:19:57 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-07 17:19:57 DEBUG WPS2SM:93 - WPS type: +2016-04-07 17:19:57 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 17:19:57 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 17:19:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-07 17:19:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-07 17:19:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 17:19:57 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-07 17:19:57 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 17:19:57 DEBUG WPS2SM:93 - WPS type: +2016-04-07 17:19:57 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 17:19:57 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 17:19:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-07 17:19:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 17:19:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 17:19:57 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 17:19:57 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-07 17:19:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 17:19:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 17:19:57 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 17:19:57 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 17:19:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 17:19:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 17:19:57 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 17:19:57 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 17:19:57 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 17:19:57 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 17:19:57 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 17:19:57 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 17:19:57 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 17:19:57 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 17:19:57 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 17:19:57 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 17:19:57 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 17:19:57 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 17:19:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-07 17:19:57 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 17:19:57 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 17:19:57 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 146 ms +2016-04-07 17:19:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 17:19:57 DEBUG ASLSession:458 - Getting security token: null in thread 34 
+2016-04-07 17:19:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 17:19:57 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 17:19:57 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 17:19:57 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 17:19:57 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 17:19:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 17:19:57 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 17:19:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 17:19:57 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 17:19:57 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 17:19:57 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 17:19:57 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 17:19:57 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 17:19:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 17:19:57 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 17:19:57 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 17:19:57 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. 
+2016-04-07 17:19:57 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 17:19:57 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 17:19:57 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 17:19:57 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 17:19:57 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 17:19:57 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 17:19:57 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 17:19:57 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 17:19:57 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 17:19:57 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 17:19:57 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 17:19:58 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 17:19:58 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 17:19:58 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 17:19:58 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 17:19:58 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 17:19:58 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 17:19:58 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 17:19:58 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 17:19:58 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 17:19:58 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 17:19:58 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 17:19:58 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 17:19:58 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 
17:19:58 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 17:19:58 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 17:19:58 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 17:19:58 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 17:19:58 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 17:19:58 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 17:19:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 17:19:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 17:19:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 17:19:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 17:19:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 17:19:58 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 17:19:58 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 17:19:58 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 17:19:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 17:19:58 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 17:19:58 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 17:19:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 17:19:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 17:19:58 INFO 
JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 17:19:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 17:19:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 17:19:58 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 17:19:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-07 17:19:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 17:19:59 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 17:19:59 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 17:19:59 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 82 ms +2016-04-07 17:19:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-07 17:19:59 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 17:19:59 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where 
($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-07 17:19:59 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-07 17:19:59 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-07 17:19:59 INFO ISClientConnector:82 - found only one RR, take it +2016-04-07 17:19:59 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-07 17:19:59 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-07 17:19:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-07 17:19:59 DEBUG StorageClient:517 - set scope: /gcube +2016-04-07 17:19:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-07 17:19:59 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 17:19:59 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 17:19:59 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-07 17:19:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube in 
thread 35 +2016-04-07 17:19:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-07 17:19:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-07 17:19:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 17:19:59 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 17:19:59 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 17:19:59 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 17:19:59 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 17:19:59 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 17:19:59 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 17:19:59 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 17:19:59 INFO WorkspaceExplorerServiceImpl:142 - end time - 875 msc 0 sec +2016-04-07 17:19:59 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 17:20:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 17:20:00 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 17:20:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 17:20:55 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 17:21:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 17:21:50 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 17:22:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 17:22:45 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 17:23:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 17:23:00 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 17:23:00 INFO SessionUtil:74 - 
SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 17:23:00 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-07 17:23:00 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 17:23:00 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 17:23:01 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. 
expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 17:23:01 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 17:23:01 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 17:23:01 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 17:23:01 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 17:23:01 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-07 17:23:01 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + +2016-04-07 17:23:01 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-07 17:23:01 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-07 17:23:01 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 17:23:01 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 17:23:01 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 17:23:01 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 17:23:01 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 17:23:01 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 17:23:01 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 17:23:01 DEBUG WPS2SM:201 - Schema: null +2016-04-07 17:23:01 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 17:23:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 17:23:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 17:23:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 17:23:01 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-07 17:23:01 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 17:23:01 DEBUG WPS2SM:93 - WPS type: +2016-04-07 17:23:01 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 17:23:01 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 17:23:01 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-07 17:23:01 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 17:23:01 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 17:23:01 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 17:23:01 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 17:23:01 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 17:23:01 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 17:23:01 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 17:23:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 17:23:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 17:23:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 17:23:01 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 17:23:01 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 17:23:01 DEBUG WPS2SM:93 - WPS type: +2016-04-07 17:23:01 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 17:23:01 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 17:23:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 17:23:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 17:23:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 17:23:01 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 17:23:01 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-07 17:23:01 DEBUG WPS2SM:93 - WPS type: +2016-04-07 17:23:01 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 17:23:01 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-07 17:23:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-07 17:23:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-07 17:23:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 17:23:01 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-07 17:23:01 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-07 17:23:01 DEBUG WPS2SM:93 - WPS type: +2016-04-07 17:23:01 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 17:23:01 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 17:23:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-07 17:23:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-07 17:23:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 17:23:01 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-07 17:23:01 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-07 17:23:01 DEBUG WPS2SM:93 - WPS type: +2016-04-07 17:23:01 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 17:23:01 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-07 17:23:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-07 17:23:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-07 17:23:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 17:23:01 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-07 17:23:01 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 17:23:01 DEBUG WPS2SM:93 - WPS type: +2016-04-07 17:23:01 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 17:23:01 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-07 17:23:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-07 17:23:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 17:23:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 17:23:01 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-07 17:23:01 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-07 17:23:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 17:23:01 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 17:23:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 17:23:01 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 17:23:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 17:23:01 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 17:23:01 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 17:23:01 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 17:23:01 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 17:23:01 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 17:23:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 17:23:01 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 17:23:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 17:23:01 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 17:23:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 17:23:01 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 17:23:01 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 17:23:01 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 17:23:01 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 17:23:01 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 17:23:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 17:23:01 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 17:23:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 17:23:01 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 17:23:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 17:23:01 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 17:23:01 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 17:23:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 17:23:01 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 17:23:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 17:23:01 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 17:23:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 17:23:01 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 17:23:01 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 17:23:01 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 17:23:01 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 17:23:01 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 17:23:01 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 17:23:01 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 17:23:01 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 17:23:01 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 17:23:01 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 17:23:01 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 17:23:01 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 17:23:01 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 17:23:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 17:23:01 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 17:23:01 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 17:23:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 17:23:01 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 17:23:01 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 17:23:01 INFO JCRWorkspace:315 - Getting 
Workspace of user: giancarlo.panichi +2016-04-07 17:23:01 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 17:23:01 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 17:23:01 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 17:23:01 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 17:23:01 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 17:23:01 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 17:23:01 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 17:23:01 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 17:23:01 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 17:23:01 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 17:23:01 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 17:23:01 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 17:23:01 INFO WorkspaceExplorerServiceImpl:142 - end time - 415 msc 0 sec +2016-04-07 17:23:01 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 17:23:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 17:23:40 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 17:24:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 17:24:35 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 17:25:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 17:25:30 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 
17:26:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 17:26:25 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 17:27:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 17:27:20 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 17:28:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 17:28:15 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 17:29:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 17:29:10 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 17:30:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 17:30:05 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 17:31:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 17:31:00 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 17:31:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 17:31:07 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 17:31:07 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 17:31:07 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 17:31:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 17:31:09 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 17:31:09 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 17:31:09 INFO StatWPSClientSession:84 - CONNECT 
+2016-04-07 17:31:10 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 17:31:10 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). 
A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. 
Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. 
The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 17:32:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 17:32:02 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 17:32:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 17:32:57 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 17:33:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 17:33:52 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 17:34:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 17:34:47 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 17:35:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 17:35:42 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 17:36:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 
+2016-04-07 17:36:37 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 17:37:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 17:37:32 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 17:38:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 17:38:27 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 17:39:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 17:39:22 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 17:40:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 17:40:17 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 17:41:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 17:41:12 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 17:42:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 17:42:07 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 17:43:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 17:43:02 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 17:43:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 17:43:57 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 17:44:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 17:44:52 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 17:45:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 17:45:47 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 17:46:42 DEBUG 
DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 17:46:42 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 17:47:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 17:47:37 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 17:48:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 17:48:32 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 17:49:27 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 17:49:27 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 17:49:27 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 17:49:27 WARN SessionCheckerServiceImpl:80 - Scope is null at Thu Apr 07 17:49:27 CEST 2016 +2016-04-07 17:49:27 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 17:54:05 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 17:54:05 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 17:54:05 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 18:00:31 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 18:00:31 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 18:00:31 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 18:00:31 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 18:00:31 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 18:00:31 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 18:00:31 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 18:00:31 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@9057788 +2016-04-07 18:00:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 18:00:31 INFO ASLSession:352 - Logging the entrance +2016-04-07 18:00:31 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 18:00:31 DEBUG TemplateModel:83 - 2016-04-07 18:00:31, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 18:00:31 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 18:00:31 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 18:03:28 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 18:03:28 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 18:03:28 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 18:03:28 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 18:03:28 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 18:03:28 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 18:03:28 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 18:03:28 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@41cda15e +2016-04-07 18:03:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 18:03:28 INFO ASLSession:352 - Logging the entrance +2016-04-07 18:03:28 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 18:03:28 DEBUG TemplateModel:83 - 2016-04-07 18:03:28, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 18:03:28 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 18:03:28 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 18:03:46 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 18:03:46 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 18:03:46 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 18:03:46 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 18:03:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 18:03:46 INFO ASLSession:352 - Logging the entrance +2016-04-07 18:03:46 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 18:03:46 DEBUG TemplateModel:83 - 2016-04-07 18:03:46, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 18:03:46 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 18:03:46 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 18:04:08 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 18:04:08 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 18:04:08 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 18:04:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 18:04:08 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 18:04:08 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 18:04:08 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 18:04:08 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 123 ms +2016-04-07 
18:04:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 18:04:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 18:04:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 18:04:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 18:04:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 18:04:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 18:04:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 18:04:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 18:04:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 18:04:09 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 18:04:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 18:04:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 18:04:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 18:04:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 18:04:09 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 18:04:09 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 18:04:09 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-07 18:04:09 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@7d228b86 +2016-04-07 18:04:09 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@5d040b3 +2016-04-07 18:04:09 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@2dc15102 +2016-04-07 18:04:09 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@738133d6 +2016-04-07 18:04:09 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 212 ms +2016-04-07 18:04:10 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 18:04:10 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 18:04:10 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 18:04:10 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 18:04:10 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 18:04:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 18:04:10 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 18:04:10 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 18:04:10 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-07 18:04:10 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 18:04:10 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 18:04:10 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 18:04:10 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 18:04:10 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 18:04:10 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 18:04:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 18:04:41 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 18:05:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 18:05:29 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 18:05:29 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 18:05:29 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-07 18:05:29 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 18:05:29 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 18:05:30 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on 
the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 18:05:30 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 18:05:30 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 18:05:30 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 18:05:30 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 18:05:30 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-07 18:05:30 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-07 18:05:30 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 18:05:30 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 18:05:30 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 18:05:30 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 18:05:30 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 18:05:30 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 18:05:30 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 18:05:30 DEBUG WPS2SM:201 - Schema: null +2016-04-07 18:05:30 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 18:05:30 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 18:05:30 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 18:05:30 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 18:05:30 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 18:05:30 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 18:05:30 DEBUG WPS2SM:93 - WPS type: +2016-04-07 18:05:30 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 18:05:30 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 18:05:30 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-07 18:05:30 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 18:05:30 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 18:05:30 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 18:05:30 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 18:05:30 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 18:05:30 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 18:05:30 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 18:05:30 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 18:05:30 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 18:05:30 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 18:05:30 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 18:05:30 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 18:05:30 DEBUG WPS2SM:93 - WPS type: +2016-04-07 18:05:30 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 18:05:30 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 18:05:30 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 18:05:30 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 18:05:30 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 18:05:30 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 18:05:30 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-07 18:05:30 DEBUG WPS2SM:93 - WPS type: +2016-04-07 18:05:30 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 18:05:30 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 18:05:30 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-07 18:05:30 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-07 18:05:30 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 18:05:30 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-07 18:05:30 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 18:05:30 DEBUG WPS2SM:93 - WPS type: +2016-04-07 18:05:30 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 18:05:30 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 18:05:30 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-07 18:05:30 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 18:05:30 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 18:05:30 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 18:05:30 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-07 18:05:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 18:05:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 18:05:30 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 18:05:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 18:05:30 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 18:05:30 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 18:05:30 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 18:05:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 18:05:30 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 18:05:30 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 18:05:30 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 18:05:30 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 18:05:30 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 18:05:30 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 18:05:30 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 18:05:30 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 18:05:30 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 18:05:30 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 18:05:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 18:05:30 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 18:05:30 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 18:05:30 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-07 18:05:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 18:05:31 DEBUG ASLSession:458 - Getting security token: null in thread 33 
+2016-04-07 18:05:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 18:05:31 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 18:05:31 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 18:05:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 18:05:31 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 18:05:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 18:05:31 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 18:05:31 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 18:05:31 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 18:05:31 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 18:05:31 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 18:05:31 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 18:05:31 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 18:05:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 18:05:31 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 18:05:31 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 18:05:31 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 18:05:31 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. 
+2016-04-07 18:05:31 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 18:05:31 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 18:05:31 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 18:05:31 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 18:05:31 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 18:05:31 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 18:05:31 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 18:05:31 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 18:05:31 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 18:05:31 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 18:05:31 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 18:05:31 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 18:05:31 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 18:05:31 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 18:05:31 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 18:05:31 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 18:05:31 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 18:05:31 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 18:05:31 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 18:05:31 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 18:05:31 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 18:05:31 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 18:05:31 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 18:05:31 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 18:05:31 DEBUG JCRHomeManager:97 - 
User is already logged +2016-04-07 18:05:31 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 18:05:31 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 18:05:31 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 18:05:31 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 18:05:31 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 18:05:31 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 18:05:31 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 18:05:31 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 18:05:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 18:05:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 18:05:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 18:05:32 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 18:05:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 18:05:32 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 18:05:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 18:05:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 18:05:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 18:05:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 18:05:32 INFO JCRServlets:238 - Calling 
Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 18:05:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 18:05:32 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 18:05:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-07 18:05:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 18:05:32 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 18:05:32 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 18:05:32 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 31 ms +2016-04-07 18:05:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-07 18:05:32 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 18:05:32 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-07 18:05:32 
INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 15 ms +2016-04-07 18:05:32 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-07 18:05:32 INFO ISClientConnector:82 - found only one RR, take it +2016-04-07 18:05:32 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-07 18:05:32 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-07 18:05:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-07 18:05:32 DEBUG StorageClient:517 - set scope: /gcube +2016-04-07 18:05:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-07 18:05:32 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 18:05:32 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 18:05:32 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-07 18:05:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-07 18:05:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-07 18:05:32 DEBUG 
DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-07 18:05:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 18:05:32 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 18:05:32 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 18:05:32 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 18:05:32 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 18:05:32 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 18:05:32 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 18:05:32 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 18:05:32 INFO WorkspaceExplorerServiceImpl:142 - end time - 667 msc 0 sec +2016-04-07 18:05:32 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 18:05:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 18:05:36 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 18:06:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 18:06:31 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 18:07:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 18:07:03 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 18:07:03 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 18:07:03 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-07 18:07:03 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 18:07:03 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 18:07:03 DEBUG SClient4WPS:284 - + 
org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. 
number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 18:07:03 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 18:07:03 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 18:07:03 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 18:07:03 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. 
table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 18:07:03 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-07 18:07:03 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + +2016-04-07 18:07:03 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-07 18:07:03 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-07 18:07:03 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 18:07:03 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 18:07:03 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 18:07:03 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 18:07:03 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 18:07:03 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 18:07:03 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 18:07:03 DEBUG WPS2SM:201 - Schema: null +2016-04-07 18:07:03 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 18:07:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 18:07:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 18:07:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 18:07:03 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-07 18:07:03 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 18:07:03 DEBUG WPS2SM:93 - WPS type: +2016-04-07 18:07:03 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 18:07:03 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 18:07:03 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-07 18:07:03 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 18:07:03 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 18:07:03 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 18:07:03 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 18:07:03 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 18:07:03 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 18:07:03 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 18:07:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 18:07:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 18:07:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 18:07:03 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 18:07:03 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 18:07:03 DEBUG WPS2SM:93 - WPS type: +2016-04-07 18:07:03 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 18:07:03 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 18:07:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 18:07:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 18:07:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 18:07:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 18:07:03 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-07 18:07:03 DEBUG WPS2SM:93 - WPS type: +2016-04-07 18:07:03 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 18:07:03 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-07 18:07:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-07 18:07:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-07 18:07:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 18:07:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-07 18:07:03 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-07 18:07:03 DEBUG WPS2SM:93 - WPS type: +2016-04-07 18:07:03 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 18:07:03 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 18:07:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-07 18:07:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-07 18:07:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 18:07:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-07 18:07:03 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-07 18:07:03 DEBUG WPS2SM:93 - WPS type: +2016-04-07 18:07:03 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 18:07:03 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-07 18:07:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-07 18:07:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-07 18:07:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 18:07:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-07 18:07:03 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 18:07:03 DEBUG WPS2SM:93 - WPS type: +2016-04-07 18:07:03 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 18:07:03 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-07 18:07:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-07 18:07:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 18:07:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 18:07:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-07 18:07:03 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-07 18:07:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 18:07:03 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 18:07:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 18:07:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 18:07:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 18:07:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 18:07:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 18:07:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 18:07:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 18:07:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 18:07:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 18:07:03 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 18:07:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 18:07:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 18:07:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 18:07:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 18:07:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 18:07:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 18:07:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 18:07:03 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 18:07:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 18:07:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 18:07:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 18:07:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 18:07:03 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 18:07:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 18:07:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 18:07:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 18:07:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 18:07:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 18:07:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 18:07:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 18:07:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 18:07:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 18:07:03 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 18:07:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 18:07:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 18:07:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 18:07:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 18:07:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 18:07:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 18:07:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 18:07:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 18:07:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 18:07:03 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 18:07:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 18:07:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 18:07:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 18:07:03 INFO JCRServlets:238 - Calling Servlet 
GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 18:07:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 18:07:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 18:07:03 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 18:07:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 18:07:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 18:07:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 18:07:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 18:07:04 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 18:07:04 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 18:07:04 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 18:07:04 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 18:07:04 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 18:07:04 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 18:07:04 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 18:07:04 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 18:07:04 INFO WorkspaceExplorerServiceImpl:142 - end time - 240 msc 0 sec +2016-04-07 18:07:04 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 18:07:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 18:07:26 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 19:25:59 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue 
+2016-04-07 19:25:59 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 19:25:59 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 19:25:59 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 19:25:59 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 19:25:59 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:25:59 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 19:25:59 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@72d6f12b +2016-04-07 19:25:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:25:59 INFO ASLSession:352 - Logging the entrance +2016-04-07 19:25:59 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 19:25:59 DEBUG TemplateModel:83 - 2016-04-07 19:25:59, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 19:25:59 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:25:59 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 19:26:03 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 19:26:03 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 19:26:03 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 19:26:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:26:03 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:26:03 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:26:03 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 19:26:03 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 166 ms +2016-04-07 19:26:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 19:26:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 19:26:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 19:26:03 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 19:26:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 19:26:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 19:26:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 19:26:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 19:26:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 19:26:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 19:26:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 19:26:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 19:26:03 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 19:26:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 19:26:03 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 19:26:03 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:26:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 19:26:03 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@45a4de4b +2016-04-07 19:26:03 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@c348ccd +2016-04-07 19:26:03 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@1a732afd +2016-04-07 19:26:03 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@6f275480 +2016-04-07 19:26:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 96 ms +2016-04-07 19:26:04 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 19:26:04 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 19:26:04 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 19:26:04 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 19:26:04 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 19:26:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:26:04 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:26:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 19:26:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 25 ms +2016-04-07 19:26:04 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 19:26:04 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:26:04 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 19:26:04 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 19:26:05 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:26:05 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 19:26:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 19:26:15 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 19:26:15 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:26:15 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-07 19:26:15 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:26:15 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 19:26:16 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 19:26:16 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 19:26:16 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 19:26:16 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 19:26:16 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 19:26:16 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-07 19:26:16 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-07 19:26:16 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 19:26:16 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 19:26:16 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 19:26:16 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:26:16 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 19:26:16 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 19:26:16 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 19:26:16 DEBUG WPS2SM:201 - Schema: null +2016-04-07 19:26:16 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 19:26:16 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 19:26:16 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 19:26:16 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:26:16 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 19:26:16 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 19:26:16 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:26:16 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 19:26:16 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 19:26:16 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-07 19:26:16 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 19:26:16 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 19:26:16 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 19:26:16 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 19:26:16 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 19:26:16 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 19:26:16 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 19:26:16 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 19:26:16 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 19:26:16 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:26:16 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 19:26:16 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 19:26:16 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:26:16 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 19:26:16 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 19:26:16 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 19:26:16 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 19:26:16 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:26:16 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 19:26:16 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-07 19:26:16 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:26:16 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:26:16 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 19:26:16 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-07 19:26:16 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-07 19:26:16 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:26:16 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-07 19:26:16 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 19:26:16 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:26:16 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:26:16 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 19:26:16 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-07 19:26:16 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 19:26:16 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:26:16 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 19:26:16 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-07 19:26:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 19:26:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:26:16 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 19:26:16 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 19:26:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:26:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 19:26:16 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:26:16 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:26:16 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:26:16 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:26:16 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 19:26:16 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 19:26:16 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 19:26:16 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 19:26:16 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 19:26:16 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 19:26:16 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 19:26:16 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 19:26:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-07 19:26:16 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:26:16 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 19:26:16 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-07 19:26:16 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is 
null?false +2016-04-07 19:26:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:26:16 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:26:16 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 19:26:16 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:26:16 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:26:16 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 19:26:16 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:26:16 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 19:26:16 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:26:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:26:16 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:26:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:26:16 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:26:16 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:26:16 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:26:16 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:26:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:26:16 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 19:26:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:26:16 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:26:16 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:26:16 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:26:16 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:26:17 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 19:26:17 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 19:26:17 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 19:26:17 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 19:26:17 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 19:26:17 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 19:26:17 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 19:26:17 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:26:17 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:26:17 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:26:17 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:26:17 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:26:17 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:26:17 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:26:17 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 19:26:17 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:26:17 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:26:17 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:26:17 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:26:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-07 19:26:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:26:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 19:26:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:26:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:26:17 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:26:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:26:17 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 19:26:17 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:26:17 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:26:17 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:26:17 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:26:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:26:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:26:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:26:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:26:17 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 19:26:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 19:26:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 
19:26:17 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:26:17 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 19:26:17 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 30 ms +2016-04-07 19:26:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 19:26:17 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:26:17 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-07 19:26:17 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-07 19:26:17 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-07 19:26:17 INFO ISClientConnector:82 - found only one RR, take it +2016-04-07 19:26:17 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-07 19:26:17 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-07 19:26:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 19:26:17 DEBUG StorageClient:517 - set scope: /gcube +2016-04-07 19:26:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 19:26:17 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:26:17 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 19:26:17 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-07 19:26:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 19:26:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 19:26:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 19:26:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:26:17 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 19:26:17 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 19:26:17 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 19:26:17 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-07 19:26:17 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 19:26:17 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 19:26:17 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 19:26:17 INFO WorkspaceExplorerServiceImpl:142 - end time - 589 msc 0 sec +2016-04-07 19:26:17 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 19:26:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:26:34 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 19:26:34 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:26:34 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-07 19:26:34 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:26:34 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 19:26:35 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 19:26:35 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 19:26:35 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 19:26:35 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 19:26:35 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 19:26:35 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-07 19:26:35 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-07 19:26:35 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-07 19:26:35 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-07 19:26:35 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 19:26:35 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 19:26:35 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 19:26:35 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:26:35 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 19:26:35 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 19:26:35 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 19:26:35 DEBUG WPS2SM:201 - Schema: null +2016-04-07 19:26:35 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 19:26:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 19:26:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 19:26:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:26:35 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 19:26:35 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 19:26:35 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:26:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 19:26:35 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 19:26:35 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-07 19:26:35 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 19:26:35 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 19:26:35 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 19:26:35 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 19:26:35 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 19:26:35 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 19:26:35 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 19:26:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 19:26:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 19:26:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:26:35 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 19:26:35 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 19:26:35 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:26:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 19:26:35 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 19:26:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 19:26:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 19:26:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:26:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 19:26:35 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-07 19:26:35 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:26:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:26:35 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-07 19:26:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-07 19:26:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-07 19:26:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:26:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-07 19:26:35 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-07 19:26:35 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:26:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:26:35 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 19:26:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-07 19:26:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-07 19:26:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:26:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-07 19:26:35 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-07 19:26:35 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:26:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:26:35 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-07 19:26:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-07 19:26:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-07 19:26:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:26:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-07 19:26:35 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 19:26:35 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:26:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:26:35 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-07 19:26:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-07 19:26:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 19:26:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:26:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-07 19:26:35 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-07 19:26:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:26:35 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 19:26:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:26:35 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:26:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:26:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:26:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:26:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:26:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:26:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:26:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 19:26:35 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 19:26:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 19:26:35 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:26:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:26:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:26:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:26:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:26:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:26:35 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 19:26:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 19:26:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:26:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:26:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:26:35 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 19:26:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:26:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:26:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 19:26:35 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 19:26:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 19:26:35 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:26:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:26:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:26:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:26:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:26:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:26:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:26:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:26:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:26:35 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:26:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:26:35 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:26:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:26:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:26:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:26:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:26:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:26:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:26:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:26:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:26:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:26:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:26:35 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:26:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:26:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:26:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:26:35 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 19:26:35 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 19:26:35 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 19:26:35 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 19:26:35 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 19:26:35 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 19:26:35 DEBUG ItemBuilder:108 - Is shared folder: 
StatisticalAlgorithmsImporter +2016-04-07 19:26:35 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 19:26:35 INFO WorkspaceExplorerServiceImpl:142 - end time - 243 msc 0 sec +2016-04-07 19:26:35 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 19:26:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 19:26:54 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 19:27:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 19:27:49 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 19:28:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 19:28:44 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 19:29:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 19:29:39 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 19:30:47 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 19:30:47 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 19:30:47 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 19:30:47 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 19:30:47 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 19:30:47 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 19:30:47 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 19:30:47 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@7973c2b5 +2016-04-07 19:30:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:30:47 INFO ASLSession:352 - Logging the entrance +2016-04-07 19:30:47 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 19:30:47 DEBUG TemplateModel:83 - 2016-04-07 19:30:47, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 19:30:47 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:30:47 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 19:30:54 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 19:30:54 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 19:30:54 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 19:30:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 19:30:54 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 19:30:54 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:30:54 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 19:30:54 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 123 ms +2016-04-07 19:30:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 19:30:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 19:30:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 19:30:54 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 19:30:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 19:30:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 19:30:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 19:30:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 19:30:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 19:30:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 19:30:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 19:30:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 19:30:54 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 19:30:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 19:30:54 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 19:30:54 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:30:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 19:30:54 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@359d333e +2016-04-07 19:30:54 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@6cea3a80 +2016-04-07 19:30:54 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@4b2aeaa7 +2016-04-07 19:30:54 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@3d2ae577 +2016-04-07 19:30:54 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 109 ms +2016-04-07 19:30:55 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 19:30:55 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 19:30:55 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 19:30:55 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 19:30:55 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 19:30:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 19:30:55 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:30:55 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 19:30:55 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 25 ms +2016-04-07 19:30:55 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 19:30:55 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:30:55 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 19:30:55 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 19:30:55 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:30:55 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 19:31:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:31:02 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 19:31:02 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:31:02 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-07 19:31:02 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:31:02 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 19:31:03 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 19:31:03 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 19:31:03 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 19:31:03 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 19:31:03 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 19:31:03 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-07 19:31:03 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-07 19:31:03 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 19:31:03 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 19:31:03 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 19:31:03 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:31:03 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 19:31:03 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 19:31:03 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 19:31:03 DEBUG WPS2SM:201 - Schema: null +2016-04-07 19:31:03 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 19:31:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 19:31:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 19:31:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:31:03 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 19:31:03 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 19:31:03 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:31:03 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 19:31:03 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 19:31:03 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-07 19:31:03 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 19:31:03 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 19:31:03 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 19:31:03 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 19:31:03 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 19:31:03 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 19:31:03 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 19:31:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 19:31:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 19:31:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:31:03 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 19:31:03 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 19:31:03 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:31:03 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 19:31:03 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 19:31:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 19:31:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 19:31:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:31:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 19:31:03 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-07 19:31:03 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:31:03 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:31:03 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 19:31:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-07 19:31:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-07 19:31:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:31:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-07 19:31:03 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 19:31:03 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:31:03 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:31:03 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 19:31:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-07 19:31:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 19:31:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:31:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 19:31:03 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-07 19:31:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 19:31:03 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 19:31:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 19:31:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 19:31:03 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 19:31:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 19:31:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:31:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:31:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:31:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:31:03 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 19:31:03 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 19:31:03 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 19:31:03 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 19:31:03 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 19:31:03 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 19:31:03 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 19:31:03 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 19:31:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 19:31:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:31:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 19:31:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-07 19:31:03 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 19:31:03 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 34 +2016-04-07 19:31:03 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 19:31:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:31:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:31:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:31:03 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 19:31:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:31:03 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 19:31:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:31:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:31:03 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:31:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:31:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:31:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:31:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:31:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:31:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 19:31:03 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 19:31:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 19:31:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:31:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:31:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:31:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:31:04 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 19:31:04 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 19:31:04 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 19:31:04 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 19:31:04 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 19:31:04 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 19:31:04 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 19:31:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:31:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:31:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:31:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:31:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:31:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:31:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:31:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:31:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:31:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:31:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:31:04 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 19:31:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-07 19:31:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:31:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:31:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 19:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:31:04 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:31:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 19:31:04 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 19:31:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:31:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:31:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:31:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:31:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:31:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:31:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:31:04 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 19:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-07 19:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 
19:31:04 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:31:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 19:31:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 21 ms +2016-04-07 19:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-07 19:31:04 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:31:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-07 19:31:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-07 19:31:04 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-07 19:31:04 INFO ISClientConnector:82 - found only one RR, take it +2016-04-07 19:31:04 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-07 19:31:04 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-07 19:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-07 19:31:04 DEBUG StorageClient:517 - set scope: /gcube +2016-04-07 19:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-07 19:31:04 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:31:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 19:31:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 27 ms +2016-04-07 19:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-07 19:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-07 19:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-07 19:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 19:31:04 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 19:31:04 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 19:31:04 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 19:31:04 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-07 19:31:04 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 19:31:04 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 19:31:04 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 19:31:04 INFO WorkspaceExplorerServiceImpl:142 - end time - 415 msc 0 sec +2016-04-07 19:31:04 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 19:31:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:31:42 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 19:32:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 19:32:37 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 19:33:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 19:33:32 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 19:34:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:34:27 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:34:57 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 19:34:57 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 19:34:57 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 19:34:57 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 19:34:57 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 19:34:57 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:34:57 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 19:34:57 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@316fadc6 +2016-04-07 19:34:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:34:57 INFO ASLSession:352 - Logging the entrance +2016-04-07 19:34:57 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 19:34:57 DEBUG TemplateModel:83 - 2016-04-07 19:34:57, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 19:34:57 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:34:57 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 19:35:01 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 19:35:01 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 19:35:01 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 19:35:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:35:01 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:35:01 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:35:01 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 19:35:01 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 120 ms +2016-04-07 19:35:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 19:35:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 19:35:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 19:35:01 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 19:35:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 19:35:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 19:35:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 19:35:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 19:35:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 19:35:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 19:35:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 19:35:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 19:35:01 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 19:35:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 19:35:01 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 19:35:01 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:35:01 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 19:35:01 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@4b136613 +2016-04-07 19:35:01 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@1d12cfe5 +2016-04-07 19:35:01 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@6131e8c6 +2016-04-07 19:35:01 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@2fcd5dba +2016-04-07 19:35:01 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 101 ms +2016-04-07 19:35:02 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 19:35:02 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 19:35:02 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 19:35:02 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 19:35:02 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 19:35:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:35:02 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:35:02 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 19:35:02 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 21 ms +2016-04-07 19:35:02 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 19:35:02 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:35:02 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 19:35:02 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 19:35:02 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:35:02 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 19:35:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:35:11 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:35:11 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:35:11 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-07 19:35:11 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:35:11 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 19:35:11 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 19:35:11 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 19:35:11 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 19:35:11 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 19:35:11 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 19:35:11 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-07 19:35:11 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-07 19:35:11 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 19:35:11 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 19:35:11 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 19:35:11 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:35:11 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 19:35:11 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 19:35:11 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 19:35:11 DEBUG WPS2SM:201 - Schema: null +2016-04-07 19:35:11 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 19:35:11 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 19:35:11 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 19:35:11 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:35:11 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 19:35:11 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 19:35:11 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:35:11 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 19:35:11 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 19:35:11 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-07 19:35:11 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 19:35:11 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 19:35:11 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 19:35:11 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 19:35:11 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 19:35:11 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 19:35:11 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 19:35:11 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 19:35:11 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 19:35:11 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:35:11 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 19:35:11 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 19:35:11 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:35:11 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 19:35:11 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 19:35:11 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 19:35:11 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 19:35:11 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:35:11 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 19:35:11 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-07 19:35:11 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:35:11 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:35:11 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 19:35:11 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-07 19:35:11 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-07 19:35:11 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:35:11 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-07 19:35:11 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 19:35:11 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:35:11 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:35:11 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 19:35:11 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-07 19:35:11 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 19:35:11 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:35:11 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 19:35:11 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-07 19:35:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 19:35:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:35:12 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 19:35:12 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 19:35:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 19:35:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:35:12 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:35:12 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:35:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:35:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:35:12 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 19:35:12 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 19:35:12 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 19:35:12 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 19:35:12 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 19:35:12 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 19:35:12 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 19:35:12 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 19:35:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-07 19:35:12 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:35:12 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 19:35:12 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 27 ms +2016-04-07 19:35:12 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is 
null?false +2016-04-07 19:35:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:35:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:35:12 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 19:35:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:35:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:35:12 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 19:35:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:35:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:35:12 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 19:35:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 19:35:12 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 19:35:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 19:35:12 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:35:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:35:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:35:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:35:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 19:35:12 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 19:35:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 19:35:12 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:35:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:35:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:35:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:35:12 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 19:35:12 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 19:35:12 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 19:35:12 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 19:35:12 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 19:35:12 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 19:35:12 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 19:35:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:35:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:35:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:35:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:35:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:35:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:35:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:35:12 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 19:35:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:35:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:35:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:35:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:35:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-07 19:35:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:35:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 19:35:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:35:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:35:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 19:35:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:35:12 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 19:35:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:35:12 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 19:35:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:35:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:35:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:35:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:35:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:35:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:35:12 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 19:35:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-07 19:35:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 
19:35:13 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:35:13 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 19:35:13 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 24 ms +2016-04-07 19:35:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-07 19:35:13 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:35:13 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-07 19:35:13 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-07 19:35:13 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-07 19:35:13 INFO ISClientConnector:82 - found only one RR, take it +2016-04-07 19:35:13 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-07 19:35:13 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-07 19:35:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-07 19:35:13 DEBUG StorageClient:517 - set scope: /gcube +2016-04-07 19:35:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-07 19:35:13 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:35:13 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 19:35:13 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-07 19:35:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-07 19:35:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-07 19:35:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-07 19:35:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 19:35:13 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 19:35:13 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 19:35:13 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 19:35:13 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-07 19:35:13 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 19:35:13 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 19:35:13 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 19:35:13 INFO WorkspaceExplorerServiceImpl:142 - end time - 449 msc 0 sec +2016-04-07 19:35:13 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 19:35:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 19:35:52 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 19:36:52 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 19:36:52 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 19:36:52 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 19:36:52 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 19:36:52 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 19:36:52 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 19:36:52 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 19:36:52 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@4c2fb7f8 +2016-04-07 19:36:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:36:52 INFO ASLSession:352 - Logging the entrance +2016-04-07 19:36:52 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 19:36:52 DEBUG TemplateModel:83 - 2016-04-07 19:36:52, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 19:36:52 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:36:52 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 19:36:56 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 19:36:56 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 19:36:56 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 19:36:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:36:56 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 19:36:56 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:36:56 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 19:36:56 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 112 ms +2016-04-07 
19:36:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 19:36:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 19:36:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 19:36:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 19:36:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 19:36:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 19:36:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 19:36:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 19:36:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 19:36:56 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 19:36:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 19:36:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 19:36:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 19:36:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 19:36:56 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 19:36:56 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:36:56 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-07 19:36:56 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@16e09b5e +2016-04-07 19:36:56 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@2a1087ff +2016-04-07 19:36:56 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@38c628ba +2016-04-07 19:36:56 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@7c1749f1 +2016-04-07 19:36:56 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 123 ms +2016-04-07 19:36:56 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 19:36:56 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 19:36:56 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 19:36:56 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 19:36:56 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 19:36:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:36:56 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:36:56 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 19:36:56 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 30 ms +2016-04-07 19:36:56 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 19:36:56 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:36:56 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 19:36:56 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 19:36:57 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:36:57 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 19:37:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:37:01 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:37:01 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:37:01 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-07 19:37:01 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:37:01 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 19:37:01 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 19:37:01 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 19:37:01 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 19:37:01 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 19:37:01 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 19:37:01 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-07 19:37:01 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-07 19:37:01 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 19:37:01 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 19:37:01 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 19:37:01 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:37:01 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 19:37:01 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 19:37:01 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 19:37:01 DEBUG WPS2SM:201 - Schema: null +2016-04-07 19:37:01 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 19:37:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 19:37:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 19:37:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:37:01 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 19:37:01 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 19:37:01 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:37:01 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 19:37:01 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 19:37:01 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-07 19:37:01 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 19:37:01 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 19:37:01 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 19:37:01 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 19:37:01 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 19:37:01 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 19:37:01 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 19:37:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 19:37:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 19:37:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:37:01 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 19:37:01 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 19:37:01 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:37:01 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 19:37:01 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 19:37:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 19:37:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 19:37:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:37:01 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 19:37:01 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-07 19:37:01 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:37:01 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:37:01 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 19:37:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-07 19:37:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-07 19:37:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:37:01 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-07 19:37:01 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 19:37:01 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:37:01 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:37:01 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 19:37:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-07 19:37:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 19:37:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:37:01 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 19:37:01 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-07 19:37:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 19:37:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 19:37:01 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 19:37:01 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 19:37:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 19:37:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 19:37:01 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:37:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:37:01 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:37:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:37:01 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 19:37:01 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 19:37:01 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 19:37:01 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 19:37:01 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 19:37:01 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 19:37:01 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 19:37:01 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 19:37:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-07 19:37:01 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:37:01 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 19:37:01 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 17 ms +2016-04-07 19:37:02 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 19:37:02 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 36 +2016-04-07 19:37:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:37:02 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 19:37:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:37:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:37:02 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 19:37:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:37:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:37:02 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 19:37:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 19:37:02 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 19:37:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 19:37:02 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:37:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:37:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:37:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:37:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:37:02 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 19:37:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:37:02 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:37:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:37:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:37:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:37:02 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 19:37:02 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 19:37:02 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 19:37:02 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 19:37:02 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 19:37:02 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 19:37:02 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 19:37:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:37:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:37:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:37:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:37:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:37:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:37:02 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 19:37:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:37:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:37:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:37:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:37:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:37:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-07 19:37:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:37:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:37:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 19:37:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 19:37:02 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 19:37:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 19:37:02 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 19:37:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:37:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:37:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:37:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:37:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:37:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:37:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:37:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:37:02 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 19:37:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 19:37:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 
19:37:02 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:37:02 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 19:37:02 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 24 ms +2016-04-07 19:37:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 19:37:02 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:37:02 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-07 19:37:02 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 15 ms +2016-04-07 19:37:02 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-07 19:37:02 INFO ISClientConnector:82 - found only one RR, take it +2016-04-07 19:37:02 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-07 19:37:02 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-07 19:37:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 19:37:02 DEBUG StorageClient:517 - set scope: /gcube +2016-04-07 19:37:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 19:37:02 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:37:02 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 19:37:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-07 19:37:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 19:37:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 19:37:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 19:37:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:37:03 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 19:37:03 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 19:37:03 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 19:37:03 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-07 19:37:03 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 19:37:03 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 19:37:03 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 19:37:03 INFO WorkspaceExplorerServiceImpl:142 - end time - 461 msc 0 sec +2016-04-07 19:37:03 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 19:37:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:37:47 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 19:38:33 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 19:38:33 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 19:38:33 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 19:38:33 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 19:38:33 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 19:38:33 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 19:38:33 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 19:38:33 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@14da2d57 +2016-04-07 19:38:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:38:33 INFO ASLSession:352 - Logging the entrance +2016-04-07 19:38:33 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 19:38:33 DEBUG TemplateModel:83 - 2016-04-07 19:38:33, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 19:38:33 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:38:33 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 19:38:37 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 19:38:37 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 19:38:37 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 19:38:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:38:37 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 19:38:37 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:38:37 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 19:38:37 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 117 ms +2016-04-07 
19:38:37 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 19:38:37 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 19:38:37 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 19:38:37 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 19:38:37 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 19:38:37 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 19:38:37 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 19:38:37 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 19:38:37 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 19:38:37 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 19:38:37 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 19:38:37 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 19:38:37 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 19:38:37 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 19:38:37 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 19:38:37 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:38:37 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-07 19:38:37 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@5ec859c1 +2016-04-07 19:38:37 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@3fc07d2d +2016-04-07 19:38:37 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@644ffae +2016-04-07 19:38:37 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@785f5667 +2016-04-07 19:38:37 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 113 ms +2016-04-07 19:38:37 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 19:38:37 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 19:38:37 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 19:38:37 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 19:38:37 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 19:38:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:38:37 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:38:37 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 19:38:37 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 28 ms +2016-04-07 19:38:37 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 19:38:37 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:38:37 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 19:38:37 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 19:38:38 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:38:38 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 19:38:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 19:38:43 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 19:38:43 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:38:43 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-07 19:38:43 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:38:43 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 19:38:43 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 19:38:43 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 19:38:43 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 19:38:43 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 19:38:43 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 19:38:43 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-07 19:38:43 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-07 19:38:43 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 19:38:43 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 19:38:43 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 19:38:43 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:38:43 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 19:38:43 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 19:38:43 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 19:38:43 DEBUG WPS2SM:201 - Schema: null +2016-04-07 19:38:43 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 19:38:43 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 19:38:43 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 19:38:43 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:38:43 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 19:38:43 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 19:38:43 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:38:43 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 19:38:43 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 19:38:43 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-07 19:38:43 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 19:38:43 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 19:38:43 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 19:38:43 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 19:38:43 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 19:38:43 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 19:38:43 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 19:38:43 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 19:38:43 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 19:38:43 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:38:43 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 19:38:43 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 19:38:43 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:38:43 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 19:38:43 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 19:38:43 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 19:38:43 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 19:38:43 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:38:43 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 19:38:43 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-07 19:38:43 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:38:43 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:38:43 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 19:38:43 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-07 19:38:43 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-07 19:38:43 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:38:43 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-07 19:38:43 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 19:38:43 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:38:43 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:38:43 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 19:38:43 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-07 19:38:43 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 19:38:43 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:38:43 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 19:38:43 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-07 19:38:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:38:43 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 19:38:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:38:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:38:43 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:38:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:38:43 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:38:43 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:38:43 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:38:43 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:38:44 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 19:38:44 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 19:38:44 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 19:38:44 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 19:38:44 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 19:38:44 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 19:38:44 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 19:38:44 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 19:38:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-07 19:38:44 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:38:44 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 19:38:44 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 33 ms +2016-04-07 19:38:44 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 19:38:44 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:38:44 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 19:38:44 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:38:44 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:38:44 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 19:38:44 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:38:44 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 19:38:44 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:38:44 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:38:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:38:44 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 19:38:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:38:44 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:38:44 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:38:44 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:38:44 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:38:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 19:38:44 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 19:38:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 19:38:44 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:38:44 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:38:44 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:38:44 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:38:44 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 19:38:44 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 19:38:44 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 19:38:44 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 19:38:44 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 19:38:44 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 19:38:44 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 19:38:44 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:38:44 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:38:44 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:38:44 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:38:44 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:38:44 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:38:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:38:44 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:38:44 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:38:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:38:44 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 19:38:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:38:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-07 19:38:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:38:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:38:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 19:38:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:38:44 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:38:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:38:44 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 19:38:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:38:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:38:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:38:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:38:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:38:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:38:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:38:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:38:44 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 19:38:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-07 19:38:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 
19:38:45 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:38:45 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 19:38:45 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 23 ms +2016-04-07 19:38:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-07 19:38:45 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:38:45 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-07 19:38:45 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-07 19:38:45 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-07 19:38:45 INFO ISClientConnector:82 - found only one RR, take it +2016-04-07 19:38:45 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-07 19:38:45 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-07 19:38:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-07 19:38:45 DEBUG StorageClient:517 - set scope: /gcube +2016-04-07 19:38:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-07 19:38:45 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:38:45 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 19:38:45 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-07 19:38:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-07 19:38:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-07 19:38:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-07 19:38:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 19:38:45 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 19:38:45 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 19:38:45 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 19:38:45 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-07 19:38:45 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 19:38:45 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 19:38:45 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 19:38:45 INFO WorkspaceExplorerServiceImpl:142 - end time - 439 msc 0 sec +2016-04-07 19:38:45 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 19:39:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:39:28 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:40:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:40:23 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:41:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 19:41:18 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 19:42:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:42:13 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:43:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:43:08 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:44:18 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 19:44:18 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 19:44:18 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 19:44:18 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 19:44:18 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 19:44:18 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 19:44:18 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 19:44:18 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@7eb64276 +2016-04-07 19:44:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 19:44:18 INFO ASLSession:352 - Logging the entrance +2016-04-07 19:44:18 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 19:44:18 DEBUG TemplateModel:83 - 2016-04-07 19:44:18, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 19:44:18 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:44:18 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 19:44:21 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 19:44:21 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 19:44:21 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 19:44:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 19:44:21 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 19:44:21 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:44:22 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 19:44:22 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 124 ms +2016-04-07 19:44:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 19:44:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 19:44:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 19:44:22 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 19:44:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 19:44:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 19:44:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 19:44:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 19:44:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 19:44:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 19:44:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 19:44:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 19:44:22 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 19:44:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 19:44:22 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 19:44:22 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:44:22 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 19:44:22 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@7ea4003 +2016-04-07 19:44:22 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@5abe91b7 +2016-04-07 19:44:22 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@7595a5c +2016-04-07 19:44:22 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@301e126c +2016-04-07 19:44:22 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 118 ms +2016-04-07 19:44:22 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 19:44:22 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 19:44:22 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 19:44:22 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 19:44:22 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 19:44:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 19:44:22 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:44:22 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 19:44:22 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 26 ms +2016-04-07 19:44:22 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 19:44:22 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:44:22 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 19:44:22 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 19:44:23 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:44:23 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 19:44:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:44:33 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 19:44:33 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:44:33 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-07 19:44:33 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:44:33 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 19:44:33 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 19:44:33 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 19:44:33 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 19:44:33 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 19:44:33 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 19:44:33 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-07 19:44:33 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-07 19:44:33 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 19:44:33 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 19:44:33 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 19:44:33 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:44:33 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 19:44:33 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 19:44:33 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 19:44:33 DEBUG WPS2SM:201 - Schema: null +2016-04-07 19:44:33 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 19:44:33 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 19:44:33 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 19:44:33 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:44:33 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 19:44:33 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 19:44:33 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:44:33 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 19:44:33 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 19:44:33 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-07 19:44:33 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 19:44:33 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 19:44:33 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 19:44:33 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 19:44:33 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 19:44:33 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 19:44:33 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 19:44:33 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 19:44:33 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 19:44:33 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:44:33 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 19:44:33 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 19:44:33 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:44:33 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 19:44:33 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 19:44:33 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 19:44:33 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 19:44:33 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:44:33 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 19:44:33 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-07 19:44:33 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:44:33 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:44:33 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 19:44:33 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-07 19:44:33 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-07 19:44:33 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:44:33 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-07 19:44:33 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 19:44:33 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:44:33 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:44:33 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 19:44:33 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-07 19:44:33 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 19:44:33 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:44:33 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 19:44:33 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-07 19:44:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 19:44:34 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 19:44:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 19:44:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 19:44:34 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 19:44:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 19:44:34 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:44:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:44:34 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:44:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:44:34 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 19:44:34 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 19:44:34 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 19:44:34 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 19:44:34 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 19:44:34 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 19:44:34 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 19:44:34 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 19:44:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-07 19:44:34 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:44:34 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 19:44:34 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 23 ms +2016-04-07 19:44:34 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 19:44:34 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 30 +2016-04-07 19:44:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:44:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:44:34 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 19:44:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:44:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:44:34 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 19:44:34 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:44:34 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 19:44:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:44:34 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:44:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:44:34 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:44:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:44:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:44:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:44:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:44:34 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 19:44:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:44:34 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:44:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:44:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:44:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:44:34 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 19:44:34 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 19:44:34 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 19:44:34 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 19:44:34 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 19:44:34 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 19:44:34 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 19:44:34 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:44:34 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:44:34 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:44:34 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:44:34 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:44:34 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:44:34 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:44:34 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:44:34 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:44:34 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:44:34 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:44:34 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 19:44:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by 
giancarlo.panichi +2016-04-07 19:44:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:44:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:44:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:44:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:44:34 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:44:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 19:44:34 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 19:44:34 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:44:34 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:44:34 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:44:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:44:34 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:44:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:44:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:44:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:44:35 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 19:44:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 19:44:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 
19:44:35 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:44:35 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 19:44:35 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 33 ms +2016-04-07 19:44:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 19:44:35 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:44:35 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-07 19:44:35 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-07 19:44:35 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-07 19:44:35 INFO ISClientConnector:82 - found only one RR, take it +2016-04-07 19:44:35 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-07 19:44:35 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-07 19:44:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 19:44:35 DEBUG StorageClient:517 - set scope: /gcube +2016-04-07 19:44:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 19:44:35 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:44:35 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 19:44:35 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-07 19:44:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 19:44:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 19:44:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 19:44:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:44:35 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 19:44:35 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 19:44:35 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 19:44:35 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-07 19:44:35 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 19:44:35 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 19:44:35 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 19:44:35 INFO WorkspaceExplorerServiceImpl:142 - end time - 497 msc 0 sec +2016-04-07 19:44:35 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 19:45:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:45:13 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:45:47 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 19:45:47 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 19:45:47 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 19:45:47 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 19:45:47 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 19:45:47 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:45:47 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 19:45:47 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@331134d9 +2016-04-07 19:45:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:45:47 INFO ASLSession:352 - Logging the entrance +2016-04-07 19:45:47 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 19:45:47 DEBUG TemplateModel:83 - 2016-04-07 19:45:47, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 19:45:47 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:45:47 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 19:45:50 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 19:45:50 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 19:45:50 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 19:45:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:45:50 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 19:45:50 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:45:50 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 19:45:51 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 115 ms +2016-04-07 
19:45:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 19:45:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 19:45:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 19:45:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 19:45:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 19:45:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 19:45:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 19:45:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 19:45:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 19:45:51 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 19:45:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 19:45:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 19:45:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 19:45:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 19:45:51 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 19:45:51 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:45:51 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-07 19:45:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@42ce2265 +2016-04-07 19:45:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@6a5205ec +2016-04-07 19:45:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@2553f628 +2016-04-07 19:45:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@3155a123 +2016-04-07 19:45:51 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 99 ms +2016-04-07 19:45:51 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 19:45:51 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 19:45:51 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 19:45:51 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 19:45:51 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 19:45:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:45:51 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:45:51 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 19:45:51 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 26 ms +2016-04-07 19:45:51 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 19:45:51 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:45:51 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 19:45:51 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 19:45:52 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:45:52 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 19:45:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:45:55 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 19:45:55 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:45:55 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-07 19:45:55 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:45:55 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 19:45:56 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 19:45:56 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 19:45:56 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 19:45:56 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 19:45:56 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 19:45:56 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-07 19:45:56 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-07 19:45:56 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 19:45:56 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 19:45:56 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 19:45:56 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:45:56 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 19:45:56 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 19:45:56 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 19:45:56 DEBUG WPS2SM:201 - Schema: null +2016-04-07 19:45:56 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 19:45:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 19:45:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 19:45:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:45:56 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 19:45:56 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 19:45:56 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:45:56 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 19:45:56 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 19:45:56 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-07 19:45:56 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 19:45:56 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 19:45:56 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 19:45:56 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 19:45:56 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 19:45:56 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 19:45:56 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 19:45:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 19:45:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 19:45:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:45:56 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 19:45:56 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 19:45:56 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:45:56 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 19:45:56 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 19:45:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 19:45:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 19:45:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:45:56 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 19:45:56 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-07 19:45:56 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:45:56 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:45:56 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 19:45:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-07 19:45:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-07 19:45:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:45:56 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-07 19:45:56 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 19:45:56 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:45:56 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:45:56 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 19:45:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-07 19:45:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 19:45:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:45:56 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 19:45:56 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-07 19:45:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 19:45:56 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 19:45:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 19:45:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 19:45:56 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 19:45:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 19:45:56 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:45:56 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:45:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:45:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:45:56 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 19:45:56 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 19:45:56 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 19:45:56 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 19:45:56 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 19:45:56 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 19:45:56 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 19:45:56 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 19:45:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-07 19:45:56 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:45:56 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 19:45:56 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-07 19:45:56 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is 
null?false +2016-04-07 19:45:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 19:45:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:45:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:45:56 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 19:45:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:45:56 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 19:45:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:45:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:45:56 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 19:45:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 19:45:56 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 19:45:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 19:45:56 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:45:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:45:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:45:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:45:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:45:56 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:45:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:45:56 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:45:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:45:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:45:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:45:56 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 19:45:56 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 19:45:56 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 19:45:56 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 19:45:56 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 19:45:56 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 19:45:56 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 19:45:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:45:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:45:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:45:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:45:56 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 19:45:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:45:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:45:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:45:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:45:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:45:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:45:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:45:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-07 19:45:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 19:45:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:45:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:45:57 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:45:57 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:45:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 19:45:57 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 19:45:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 19:45:57 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 19:45:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:45:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:45:57 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:45:57 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:45:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:45:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:45:57 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 19:45:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-07 19:45:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 
19:45:57 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:45:57 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 19:45:57 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 32 ms +2016-04-07 19:45:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-07 19:45:57 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:45:57 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-07 19:45:57 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-07 19:45:57 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-07 19:45:57 INFO ISClientConnector:82 - found only one RR, take it +2016-04-07 19:45:57 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-07 19:45:57 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-07 19:45:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-07 19:45:57 DEBUG StorageClient:517 - set scope: /gcube +2016-04-07 19:45:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-07 19:45:57 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:45:57 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 19:45:57 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-07 19:45:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-07 19:45:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-07 19:45:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-07 19:45:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:45:57 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 19:45:57 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 19:45:57 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 19:45:57 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-07 19:45:57 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 19:45:57 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 19:45:57 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 19:45:57 INFO WorkspaceExplorerServiceImpl:142 - end time - 500 msc 0 sec +2016-04-07 19:45:57 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 19:46:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 19:46:42 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 19:47:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 19:47:37 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 19:49:21 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 19:49:21 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 19:49:21 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 19:49:21 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 19:49:21 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 19:49:21 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 19:49:21 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 19:49:21 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@5aba1537 +2016-04-07 19:49:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 19:49:21 INFO ASLSession:352 - Logging the entrance +2016-04-07 19:49:21 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 19:49:21 DEBUG TemplateModel:83 - 2016-04-07 19:49:21, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 19:49:21 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:49:21 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 19:49:26 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 19:49:26 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 19:49:26 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 19:49:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 19:49:26 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 19:49:26 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:49:27 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 19:49:27 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 115 ms +2016-04-07 
19:49:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 19:49:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 19:49:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 19:49:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 19:49:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 19:49:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 19:49:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 19:49:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 19:49:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 19:49:27 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 19:49:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 19:49:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 19:49:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 19:49:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 19:49:27 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 19:49:27 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:49:27 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-07 19:49:27 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@423ad861 +2016-04-07 19:49:27 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@4a202bf6 +2016-04-07 19:49:27 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@16c590ca +2016-04-07 19:49:27 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@5e06765a +2016-04-07 19:49:27 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 103 ms +2016-04-07 19:49:27 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 19:49:27 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 19:49:27 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 19:49:27 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 19:49:27 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 19:49:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 19:49:27 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:49:27 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 19:49:27 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 30 ms +2016-04-07 19:49:27 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 19:49:27 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:49:27 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 19:49:27 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 19:49:28 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:49:28 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 19:49:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 19:49:31 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 19:49:31 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:49:31 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-07 19:49:31 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:49:31 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 19:49:31 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 19:49:31 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 19:49:31 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 19:49:31 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 19:49:31 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 19:49:31 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-07 19:49:31 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-07 19:49:31 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 19:49:31 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 19:49:31 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 19:49:31 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:49:31 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 19:49:32 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 19:49:32 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 19:49:32 DEBUG WPS2SM:201 - Schema: null +2016-04-07 19:49:32 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 19:49:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 19:49:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 19:49:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:49:32 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 19:49:32 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 19:49:32 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:49:32 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 19:49:32 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 19:49:32 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-07 19:49:32 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 19:49:32 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 19:49:32 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 19:49:32 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 19:49:32 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 19:49:32 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 19:49:32 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 19:49:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 19:49:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 19:49:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:49:32 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 19:49:32 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 19:49:32 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:49:32 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 19:49:32 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 19:49:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 19:49:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 19:49:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:49:32 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 19:49:32 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-07 19:49:32 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:49:32 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:49:32 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 19:49:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-07 19:49:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-07 19:49:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:49:32 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-07 19:49:32 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 19:49:32 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:49:32 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:49:32 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 19:49:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-07 19:49:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 19:49:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:49:32 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 19:49:32 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-07 19:49:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 19:49:32 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 19:49:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 19:49:32 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:49:32 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:49:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:49:32 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:49:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:49:32 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:49:32 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:49:32 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 19:49:32 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 19:49:32 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 19:49:32 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 19:49:32 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 19:49:32 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 19:49:32 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 19:49:32 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 19:49:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 19:49:32 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:49:32 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 19:49:32 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 21 ms +2016-04-07 19:49:32 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 19:49:32 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 34 +2016-04-07 19:49:32 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:49:32 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 19:49:32 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:49:32 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 19:49:32 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:49:32 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:49:32 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:49:32 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 19:49:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:49:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 19:49:32 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 19:49:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:49:32 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:49:32 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:49:32 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:49:32 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:49:32 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 19:49:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 19:49:32 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:49:32 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:49:32 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:49:32 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:49:32 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 19:49:32 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 19:49:32 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 19:49:32 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 19:49:32 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 19:49:32 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 19:49:32 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 19:49:32 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:49:32 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:49:32 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:49:32 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:49:32 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:49:32 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:49:32 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:49:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:49:32 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:49:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:49:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:49:32 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 19:49:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-07 19:49:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:49:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:49:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 19:49:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:49:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:49:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:49:32 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 19:49:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:49:32 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:49:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:49:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:49:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:49:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:49:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:49:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:49:33 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 19:49:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-07 19:49:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 
19:49:33 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:49:33 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 19:49:33 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 33 ms +2016-04-07 19:49:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-07 19:49:33 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:49:33 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-07 19:49:33 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-07 19:49:33 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-07 19:49:33 INFO ISClientConnector:82 - found only one RR, take it +2016-04-07 19:49:33 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-07 19:49:33 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-07 19:49:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-07 19:49:33 DEBUG StorageClient:517 - set scope: /gcube +2016-04-07 19:49:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-07 19:49:33 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:49:33 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 19:49:33 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-07 19:49:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-07 19:49:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-07 19:49:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-07 19:49:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 19:49:33 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 19:49:33 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 19:49:33 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 19:49:33 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-07 19:49:33 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 19:49:33 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 19:49:33 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 19:49:33 INFO WorkspaceExplorerServiceImpl:142 - end time - 429 msc 0 sec +2016-04-07 19:49:33 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 19:50:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:50:16 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 19:51:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 19:51:11 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 19:52:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:52:06 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 19:52:42 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 19:52:42 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 19:52:42 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 19:52:42 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 19:52:42 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 19:52:42 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 19:52:42 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 19:52:42 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@3f73ad16 +2016-04-07 19:52:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:52:42 INFO ASLSession:352 - Logging the entrance +2016-04-07 19:52:42 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 19:52:42 DEBUG TemplateModel:83 - 2016-04-07 19:52:42, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 19:52:42 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:52:42 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 19:52:45 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 19:52:45 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 19:52:45 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 19:52:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:52:45 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 19:52:45 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:52:45 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 19:52:45 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 126 ms +2016-04-07 19:52:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 19:52:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 19:52:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 19:52:45 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 19:52:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 19:52:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 19:52:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 19:52:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 19:52:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 19:52:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 19:52:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 19:52:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 19:52:45 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 19:52:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 19:52:45 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 19:52:45 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:52:45 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 19:52:46 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@4cec170 +2016-04-07 19:52:46 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@796c21a0 +2016-04-07 19:52:46 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@120abb03 +2016-04-07 19:52:46 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@5bef641e +2016-04-07 19:52:46 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 102 ms +2016-04-07 19:52:46 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 19:52:46 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 19:52:46 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 19:52:46 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 19:52:46 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 19:52:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:52:46 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:52:46 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 19:52:46 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 28 ms +2016-04-07 19:52:46 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 19:52:46 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:52:46 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 19:52:46 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 19:52:47 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:52:47 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 19:52:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:52:50 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:52:50 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:52:50 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-07 19:52:50 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:52:50 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 19:52:50 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 19:52:50 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 19:52:50 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 19:52:50 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 19:52:50 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 19:52:50 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-07 19:52:50 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-07 19:52:50 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 19:52:50 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 19:52:50 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 19:52:50 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:52:50 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 19:52:50 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 19:52:50 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 19:52:50 DEBUG WPS2SM:201 - Schema: null +2016-04-07 19:52:50 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 19:52:50 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 19:52:50 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 19:52:50 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:52:50 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 19:52:50 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 19:52:50 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:52:50 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 19:52:50 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 19:52:50 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-07 19:52:50 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 19:52:50 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 19:52:50 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 19:52:50 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 19:52:50 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 19:52:50 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 19:52:50 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 19:52:50 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 19:52:50 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 19:52:50 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:52:50 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 19:52:50 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 19:52:50 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:52:50 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 19:52:50 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 19:52:50 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 19:52:50 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 19:52:50 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:52:50 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 19:52:50 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-07 19:52:50 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:52:50 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:52:50 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 19:52:50 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-07 19:52:50 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-07 19:52:50 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:52:50 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-07 19:52:50 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 19:52:50 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:52:50 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:52:50 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 19:52:50 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-07 19:52:50 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 19:52:50 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:52:50 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 19:52:50 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-07 19:52:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 19:52:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 19:52:51 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 19:52:51 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-07 19:52:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 19:52:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-07 19:52:51 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:52:51 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:52:51 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:52:51 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:52:51 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 19:52:51 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 19:52:51 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 19:52:51 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 19:52:51 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 19:52:51 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 19:52:51 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 19:52:51 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 19:52:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-07 19:52:51 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:52:51 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 19:52:51 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 20 ms +2016-04-07 19:52:51 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 19:52:51 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 29 +2016-04-07 19:52:51 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:52:51 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 19:52:51 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:52:51 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:52:51 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:52:51 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 19:52:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:52:51 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 19:52:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:52:51 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 19:52:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:52:51 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:52:51 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:52:51 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:52:51 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:52:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 19:52:51 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 19:52:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 19:52:51 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:52:51 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:52:51 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:52:51 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:52:51 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 19:52:51 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 19:52:51 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 19:52:51 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 19:52:51 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 19:52:51 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 19:52:51 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 19:52:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:52:51 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:52:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:52:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:52:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:52:51 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:52:51 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:52:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:52:51 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 19:52:51 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:52:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:52:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:52:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-07 19:52:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 19:52:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:52:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:52:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:52:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 19:52:51 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 19:52:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:52:51 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 19:52:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:52:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:52:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:52:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:52:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:52:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:52:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:52:51 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 19:52:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 19:52:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 
19:52:52 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:52:52 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 19:52:52 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 29 ms +2016-04-07 19:52:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 19:52:52 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:52:52 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-07 19:52:52 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-07 19:52:52 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-07 19:52:52 INFO ISClientConnector:82 - found only one RR, take it +2016-04-07 19:52:52 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-07 19:52:52 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-07 19:52:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 19:52:52 DEBUG StorageClient:517 - set scope: /gcube +2016-04-07 19:52:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 19:52:52 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:52:52 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 19:52:52 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-07 19:52:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 19:52:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 19:52:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-07 19:52:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 19:52:52 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 19:52:52 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 19:52:52 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 19:52:52 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-07 19:52:52 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 19:52:52 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 19:52:52 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 19:52:52 INFO WorkspaceExplorerServiceImpl:142 - end time - 507 msc 0 sec +2016-04-07 19:52:52 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 19:53:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 19:53:37 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 19:54:59 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 19:54:59 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 19:54:59 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 19:54:59 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 19:54:59 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 19:54:59 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:54:59 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 19:54:59 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@71cb2fba +2016-04-07 19:54:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:54:59 INFO ASLSession:352 - Logging the entrance +2016-04-07 19:54:59 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 19:54:59 DEBUG TemplateModel:83 - 2016-04-07 19:54:59, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 19:54:59 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:54:59 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 19:55:06 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 19:55:06 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 19:55:06 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 19:55:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 19:55:06 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 19:55:06 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:55:06 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 19:55:07 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 117 ms +2016-04-07 
19:55:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 19:55:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 19:55:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 19:55:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 19:55:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 19:55:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 19:55:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 19:55:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 19:55:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 19:55:07 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 19:55:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 19:55:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 19:55:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 19:55:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 19:55:07 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 19:55:07 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:55:07 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-07 19:55:07 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@2f2038e1 +2016-04-07 19:55:07 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@60c4f76c +2016-04-07 19:55:07 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@1f7ed7c2 +2016-04-07 19:55:07 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@783e3583 +2016-04-07 19:55:07 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 118 ms +2016-04-07 19:55:07 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 19:55:07 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 19:55:07 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 19:55:07 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 19:55:07 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 19:55:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 19:55:07 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:55:07 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 19:55:07 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 31 ms +2016-04-07 19:55:07 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 19:55:07 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:55:07 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 19:55:07 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 19:55:08 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:55:08 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 19:55:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 19:55:11 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 19:55:11 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 19:55:11 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-07 19:55:11 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:55:11 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 19:55:11 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 19:55:11 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 19:55:11 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 19:55:11 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 19:55:11 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 19:55:11 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-07 19:55:11 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-07 19:55:11 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 19:55:11 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 19:55:11 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 19:55:11 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 19:55:11 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 19:55:11 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 19:55:11 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 19:55:11 DEBUG WPS2SM:201 - Schema: null +2016-04-07 19:55:11 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 19:55:11 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 19:55:11 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 19:55:11 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:55:11 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 19:55:11 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 19:55:11 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:55:11 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 19:55:11 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 19:55:11 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-07 19:55:11 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 19:55:11 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 19:55:11 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 19:55:11 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 19:55:11 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 19:55:11 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 19:55:11 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 19:55:11 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 19:55:11 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 19:55:11 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:55:11 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 19:55:11 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 19:55:11 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:55:11 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 19:55:11 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 19:55:11 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 19:55:11 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 19:55:11 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:55:11 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 19:55:11 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-07 19:55:11 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:55:11 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:55:11 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 19:55:11 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-07 19:55:11 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-07 19:55:11 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:55:11 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-07 19:55:11 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 19:55:11 DEBUG WPS2SM:93 - WPS type: +2016-04-07 19:55:11 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 19:55:11 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 19:55:11 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-07 19:55:11 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 19:55:11 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 19:55:11 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 19:55:11 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-07 19:55:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:55:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 19:55:11 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 19:55:11 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:55:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 19:55:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:55:11 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:55:11 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:55:11 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:55:11 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:55:11 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 19:55:11 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 19:55:11 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 19:55:11 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 19:55:11 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 19:55:11 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 19:55:11 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 19:55:11 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 19:55:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-07 19:55:11 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:55:11 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 19:55:12 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-07 19:55:12 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 19:55:12 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 36 +2016-04-07 19:55:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:55:12 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 19:55:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:55:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:55:12 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 19:55:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:55:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:55:12 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 19:55:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:55:12 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 19:55:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:55:12 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:55:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:55:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:55:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:55:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 19:55:12 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-07 19:55:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 19:55:12 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 19:55:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 19:55:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 19:55:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 19:55:12 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 19:55:12 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 19:55:12 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 19:55:12 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 19:55:12 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 19:55:12 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 19:55:12 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 19:55:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:55:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:55:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:55:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:55:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 19:55:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:55:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:55:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:55:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 19:55:12 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 19:55:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:55:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:55:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-07 19:55:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:55:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:55:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 19:55:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:55:12 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 19:55:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:55:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:55:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 19:55:12 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 19:55:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:55:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:55:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 19:55:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:55:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:55:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 19:55:12 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 19:55:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-07 19:55:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 
19:55:12 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:55:12 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 19:55:12 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 32 ms +2016-04-07 19:55:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-07 19:55:12 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:55:12 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-07 19:55:12 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-07 19:55:12 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-07 19:55:12 INFO ISClientConnector:82 - found only one RR, take it +2016-04-07 19:55:13 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-07 19:55:13 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-07 19:55:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-07 19:55:13 DEBUG StorageClient:517 - set scope: /gcube +2016-04-07 19:55:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-07 19:55:13 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 19:55:13 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 19:55:13 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-07 19:55:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-07 19:55:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-07 19:55:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-07 19:55:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-07 19:55:13 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 19:55:13 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 19:55:13 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 19:55:13 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-07 19:55:13 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 19:55:13 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 19:55:13 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 19:55:13 INFO WorkspaceExplorerServiceImpl:142 - end time - 492 msc 0 sec +2016-04-07 19:55:13 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 19:55:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 19:55:54 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 19:56:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:56:49 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:57:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 19:57:44 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 19:58:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:58:39 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 19:59:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 19:59:34 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 20:01:28 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 20:01:28 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 20:01:28 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-07 20:01:28 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 20:01:28 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 20:01:28 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 20:01:28 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 20:01:28 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@2b6480c9 +2016-04-07 20:01:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 20:01:28 INFO ASLSession:352 - Logging the entrance +2016-04-07 20:01:28 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-07 20:01:28 DEBUG TemplateModel:83 - 2016-04-07 20:01:28, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 20:01:28 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 20:01:28 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 20:01:32 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 20:01:32 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-07 20:01:32 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 20:01:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 20:01:32 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 20:01:32 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 20:01:32 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 20:01:32 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 119 ms +2016-04-07 20:01:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 20:01:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 20:01:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 20:01:32 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 20:01:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 20:01:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 20:01:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 20:01:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 20:01:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 20:01:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 20:01:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 20:01:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 20:01:32 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 20:01:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 20:01:32 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 20:01:32 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 20:01:32 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 20:01:32 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@15c31971 +2016-04-07 20:01:32 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@1c00ba4f +2016-04-07 20:01:32 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@1ce9d4d8 +2016-04-07 20:01:32 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@52ef9da0 +2016-04-07 20:01:32 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 102 ms +2016-04-07 20:01:32 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 20:01:32 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 20:01:32 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 20:01:32 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 20:01:32 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 20:01:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 20:01:32 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 20:01:32 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 20:01:32 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 21 ms +2016-04-07 20:01:32 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 20:01:32 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 20:01:32 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 20:01:32 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 20:01:33 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 20:01:33 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 20:01:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 20:01:36 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 20:01:36 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 20:01:36 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-07 20:01:36 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 20:01:36 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 20:01:37 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 20:01:37 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 20:01:37 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 20:01:37 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 20:01:37 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 20:01:37 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-07 20:01:37 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-07 20:01:37 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 20:01:37 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 20:01:37 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 20:01:37 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 20:01:37 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 20:01:37 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 20:01:37 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 20:01:37 DEBUG WPS2SM:201 - Schema: null +2016-04-07 20:01:37 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 20:01:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 20:01:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 20:01:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:01:37 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 20:01:37 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 20:01:37 DEBUG WPS2SM:93 - WPS type: +2016-04-07 20:01:37 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 20:01:37 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 20:01:37 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-07 20:01:37 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 20:01:37 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 20:01:37 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 20:01:37 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 20:01:37 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 20:01:37 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 20:01:37 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 20:01:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 20:01:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 20:01:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:01:37 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 20:01:37 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 20:01:37 DEBUG WPS2SM:93 - WPS type: +2016-04-07 20:01:37 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 20:01:37 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 20:01:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 20:01:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 20:01:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:01:37 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 20:01:37 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-07 20:01:37 DEBUG WPS2SM:93 - WPS type: +2016-04-07 20:01:37 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 20:01:37 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 20:01:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-07 20:01:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-07 20:01:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:01:37 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-07 20:01:37 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 20:01:37 DEBUG WPS2SM:93 - WPS type: +2016-04-07 20:01:37 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 20:01:37 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 20:01:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-07 20:01:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 20:01:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:01:37 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 20:01:37 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-07 20:01:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 20:01:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 20:01:37 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 20:01:37 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 20:01:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 20:01:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 20:01:37 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 20:01:37 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 20:01:37 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 20:01:37 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 20:01:37 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 20:01:37 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 20:01:37 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 20:01:37 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 20:01:37 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 20:01:37 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 20:01:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-07 20:01:37 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 20:01:37 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 20:01:37 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 20:01:37 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 20:01:37 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 17 ms +2016-04-07 20:01:37 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is 
null?false +2016-04-07 20:01:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 20:01:37 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 20:01:37 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 20:01:37 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 20:01:37 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 20:01:37 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 20:01:37 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 20:01:37 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 20:01:37 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 20:01:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 20:01:37 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 20:01:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 20:01:37 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 20:01:37 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 20:01:37 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 20:01:37 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 20:01:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 20:01:37 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 20:01:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 20:01:37 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 20:01:37 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 20:01:37 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 20:01:37 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 20:01:37 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 20:01:37 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 20:01:37 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 20:01:37 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 20:01:37 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 20:01:37 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 20:01:37 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 20:01:37 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 20:01:37 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 20:01:37 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 20:01:37 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 20:01:37 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 20:01:37 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 20:01:37 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 20:01:37 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 20:01:37 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 20:01:37 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:01:37 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:01:37 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:01:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by 
giancarlo.panichi +2016-04-07 20:01:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:01:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:01:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:01:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:01:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 20:01:38 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 20:01:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:01:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 20:01:38 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 20:01:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:01:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:01:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:01:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:01:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:01:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:01:39 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 20:01:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 20:01:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 
20:01:39 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 20:01:39 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 20:01:39 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 29 ms +2016-04-07 20:01:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 20:01:39 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 20:01:39 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-07 20:01:39 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-07 20:01:39 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-07 20:01:39 INFO ISClientConnector:82 - found only one RR, take it +2016-04-07 20:01:39 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-07 20:01:39 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-07 20:01:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 20:01:39 DEBUG StorageClient:517 - set scope: /gcube +2016-04-07 20:01:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 20:01:39 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 20:01:39 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 20:01:39 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 18 ms +2016-04-07 20:01:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 20:01:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 20:01:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 20:01:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 20:01:39 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 20:01:39 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 20:01:39 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 20:01:39 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-07 20:01:39 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 20:01:39 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 20:01:39 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 20:01:39 INFO WorkspaceExplorerServiceImpl:142 - end time - 1453 msc 1 sec +2016-04-07 20:01:39 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 20:02:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 20:02:23 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 20:03:24 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 20:03:24 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 20:03:24 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 20:03:24 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 20:03:24 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 20:03:24 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 20:03:24 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 20:03:24 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@ad20121 +2016-04-07 20:03:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 20:03:24 INFO ASLSession:352 - Logging the entrance +2016-04-07 20:03:24 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 20:03:24 DEBUG TemplateModel:83 - 2016-04-07 20:03:24, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 20:03:24 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 20:03:24 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 20:03:28 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 20:03:28 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 20:03:28 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 20:03:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 20:03:28 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 20:03:28 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 20:03:28 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 20:03:29 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 195 ms +2016-04-07 
20:03:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 20:03:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 20:03:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 20:03:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 20:03:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 20:03:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 20:03:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 20:03:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 20:03:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 20:03:29 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 20:03:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 20:03:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 20:03:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 20:03:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 20:03:29 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 20:03:29 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 20:03:29 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-07 20:03:29 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@672db63a +2016-04-07 20:03:29 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@53316ce5 +2016-04-07 20:03:29 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@4f07e23d +2016-04-07 20:03:29 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@6b0d8693 +2016-04-07 20:03:29 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 107 ms +2016-04-07 20:03:29 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 20:03:29 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 20:03:29 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 20:03:29 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 20:03:29 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 20:03:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 20:03:29 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 20:03:29 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 20:03:29 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 26 ms +2016-04-07 20:03:29 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 20:03:29 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 20:03:29 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 20:03:29 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 20:03:30 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 20:03:30 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 20:03:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 20:03:33 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 20:03:33 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 20:03:33 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-07 20:03:33 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 20:03:33 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 20:03:33 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 20:03:33 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 20:03:33 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 20:03:33 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 20:03:33 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 20:03:33 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-07 20:03:33 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-07 20:03:33 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 20:03:33 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 20:03:33 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 20:03:33 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 20:03:33 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 20:03:33 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 20:03:33 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 20:03:33 DEBUG WPS2SM:201 - Schema: null +2016-04-07 20:03:33 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 20:03:33 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 20:03:33 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 20:03:33 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:03:33 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 20:03:33 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 20:03:33 DEBUG WPS2SM:93 - WPS type: +2016-04-07 20:03:33 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 20:03:33 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 20:03:33 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-07 20:03:33 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 20:03:33 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 20:03:33 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 20:03:33 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 20:03:33 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 20:03:33 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 20:03:33 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 20:03:33 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 20:03:33 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 20:03:33 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:03:33 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 20:03:33 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 20:03:33 DEBUG WPS2SM:93 - WPS type: +2016-04-07 20:03:33 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 20:03:33 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 20:03:33 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 20:03:33 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 20:03:33 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:03:33 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 20:03:33 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-07 20:03:33 DEBUG WPS2SM:93 - WPS type: +2016-04-07 20:03:33 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 20:03:33 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 20:03:33 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-07 20:03:33 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-07 20:03:33 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:03:33 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-07 20:03:33 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 20:03:33 DEBUG WPS2SM:93 - WPS type: +2016-04-07 20:03:33 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 20:03:33 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 20:03:33 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-07 20:03:33 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 20:03:33 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:03:33 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 20:03:33 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-07 20:03:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 20:03:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 20:03:33 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 20:03:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 20:03:33 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 20:03:33 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 20:03:33 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 20:03:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 20:03:33 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 20:03:33 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 20:03:33 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 20:03:33 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 20:03:33 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 20:03:34 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 20:03:34 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 20:03:34 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 20:03:34 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 20:03:34 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 20:03:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-07 20:03:34 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 20:03:34 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 20:03:34 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 23 ms +2016-04-07 20:03:34 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 20:03:34 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 35 +2016-04-07 20:03:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 20:03:34 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 20:03:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 20:03:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 20:03:34 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 20:03:34 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 20:03:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 20:03:34 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 20:03:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 20:03:34 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 20:03:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 20:03:34 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 20:03:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 20:03:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 20:03:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 20:03:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 20:03:34 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 20:03:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 20:03:34 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 20:03:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 20:03:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 20:03:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 20:03:34 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 20:03:34 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 20:03:34 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 20:03:34 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 20:03:34 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 20:03:34 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 20:03:34 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 20:03:34 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 20:03:34 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 20:03:34 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 20:03:34 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 20:03:34 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 20:03:34 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 20:03:34 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:03:34 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 20:03:34 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 20:03:34 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:03:34 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:03:34 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 20:03:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-07 20:03:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 20:03:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:03:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:03:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 20:03:34 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 20:03:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 20:03:34 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 20:03:34 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:03:34 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:03:34 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:03:34 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:03:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:03:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:03:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:03:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:03:34 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 20:03:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-07 20:03:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 
20:03:34 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 20:03:34 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 20:03:35 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 33 ms +2016-04-07 20:03:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-07 20:03:35 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 20:03:35 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-07 20:03:35 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-07 20:03:35 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-07 20:03:35 INFO ISClientConnector:82 - found only one RR, take it +2016-04-07 20:03:35 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-07 20:03:35 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-07 20:03:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-07 20:03:35 DEBUG StorageClient:517 - set scope: /gcube +2016-04-07 20:03:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-07 20:03:35 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 20:03:35 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 20:03:35 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 32 ms +2016-04-07 20:03:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-07 20:03:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-07 20:03:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-07 20:03:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 20:03:35 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 20:03:35 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 20:03:35 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 20:03:35 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-07 20:03:35 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 20:03:35 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 20:03:35 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 20:03:35 INFO WorkspaceExplorerServiceImpl:142 - end time - 481 msc 0 sec +2016-04-07 20:03:35 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 20:04:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 20:04:19 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 20:05:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 20:05:03 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 20:05:03 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 20:05:03 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-07 20:05:03 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 20:05:03 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 20:05:03 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 20:05:03 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 20:05:03 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 20:05:03 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 20:05:03 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 20:05:03 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + +2016-04-07 20:05:03 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-07 20:05:03 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 20:05:03 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 20:05:03 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 20:05:03 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 20:05:03 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 20:05:03 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 20:05:03 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 20:05:03 DEBUG WPS2SM:201 - Schema: null +2016-04-07 20:05:03 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 20:05:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 20:05:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 20:05:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:05:03 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-07 20:05:03 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 20:05:03 DEBUG WPS2SM:93 - WPS type: +2016-04-07 20:05:03 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 20:05:03 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 20:05:03 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-07 20:05:03 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 20:05:03 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 20:05:03 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 20:05:03 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 20:05:03 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 20:05:03 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 20:05:03 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 20:05:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 20:05:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 20:05:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:05:03 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 20:05:03 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 20:05:03 DEBUG WPS2SM:93 - WPS type: +2016-04-07 20:05:03 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 20:05:03 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 20:05:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 20:05:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 20:05:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:05:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 20:05:03 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-07 20:05:03 DEBUG WPS2SM:93 - WPS type: +2016-04-07 20:05:03 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 20:05:03 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 20:05:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-07 20:05:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-07 20:05:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:05:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-07 20:05:03 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 20:05:03 DEBUG WPS2SM:93 - WPS type: +2016-04-07 20:05:03 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 20:05:03 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 20:05:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-07 20:05:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 20:05:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:05:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 20:05:03 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-07 20:05:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 20:05:03 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 20:05:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 20:05:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 20:05:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 20:05:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 20:05:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 20:05:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 20:05:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 20:05:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:05:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 20:05:03 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 20:05:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 20:05:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 20:05:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 20:05:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 20:05:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 20:05:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 20:05:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 20:05:03 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 20:05:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:05:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 20:05:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:05:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 20:05:03 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 20:05:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:05:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 20:05:03 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 20:05:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 20:05:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 20:05:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 20:05:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 20:05:03 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 20:05:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 20:05:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 20:05:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 20:05:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 20:05:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 20:05:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 20:05:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 20:05:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:05:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 20:05:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 20:05:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 20:05:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 20:05:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:05:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:05:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:05:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:05:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:05:03 DEBUG DefaultScopeProvider:38 - setting scope 
/gcube/devsec/devVRE in thread 33 +2016-04-07 20:05:03 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 20:05:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:05:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:05:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:05:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:05:03 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 20:05:04 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 20:05:04 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 20:05:04 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 20:05:04 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 20:05:04 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 20:05:04 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 20:05:04 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 20:05:04 INFO WorkspaceExplorerServiceImpl:142 - end time - 257 msc 0 sec +2016-04-07 20:05:04 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 20:05:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 20:05:14 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 20:06:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 20:06:09 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 20:07:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 20:07:04 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 
20:07:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 20:07:59 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 20:08:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 20:08:54 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 20:09:35 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-07 20:09:35 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-07 20:09:35 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-07 20:09:35 INFO SessionUtil:49 - no user found in session, use test user +2016-04-07 20:09:35 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-07 20:09:35 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 20:09:35 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-07 20:09:35 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@2f455446 +2016-04-07 20:09:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 20:09:35 INFO ASLSession:352 - Logging the entrance +2016-04-07 20:09:35 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-07 20:09:35 DEBUG TemplateModel:83 - 2016-04-07 20:09:35, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-07 20:09:35 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 20:09:35 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-07 20:09:39 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-07 20:09:39 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-07 20:09:39 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-07 20:09:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 20:09:39 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 20:09:39 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 20:09:39 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-07 20:09:39 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 118 ms +2016-04-07 
20:09:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-07 20:09:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-07 20:09:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-07 20:09:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-07 20:09:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-07 20:09:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-07 20:09:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-07 20:09:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-07 20:09:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-07 20:09:39 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-07 20:09:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-07 20:09:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-07 20:09:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-07 20:09:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-07 20:09:39 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-07 20:09:39 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 20:09:39 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-07 20:09:39 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@41a41744 +2016-04-07 20:09:39 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@35b8a9f8 +2016-04-07 20:09:39 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@68498404 +2016-04-07 20:09:39 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@763d94d0 +2016-04-07 20:09:39 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 110 ms +2016-04-07 20:09:39 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-07 20:09:39 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-07 20:09:39 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-07 20:09:39 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-07 20:09:39 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-07 20:09:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 20:09:40 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 20:09:40 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-07 20:09:40 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 25 ms +2016-04-07 20:09:40 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-07 20:09:40 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 20:09:40 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-07 20:09:40 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 20:09:40 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 20:09:40 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-07 20:09:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 20:09:44 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 20:09:44 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 20:09:44 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-07 20:09:44 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 20:09:44 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 20:09:44 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 20:09:44 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 20:09:44 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 20:09:44 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 20:09:44 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 20:09:44 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-07 20:09:44 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-07 20:09:44 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 20:09:44 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 20:09:44 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 20:09:44 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 20:09:44 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 20:09:44 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 20:09:44 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 20:09:44 DEBUG WPS2SM:201 - Schema: null +2016-04-07 20:09:44 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 20:09:44 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 20:09:44 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 20:09:44 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:09:44 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 20:09:44 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 20:09:44 DEBUG WPS2SM:93 - WPS type: +2016-04-07 20:09:44 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 20:09:44 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 20:09:44 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-07 20:09:44 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 20:09:44 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 20:09:44 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 20:09:44 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 20:09:44 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 20:09:44 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 20:09:44 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 20:09:44 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 20:09:44 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 20:09:44 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:09:44 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 20:09:44 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 20:09:44 DEBUG WPS2SM:93 - WPS type: +2016-04-07 20:09:44 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 20:09:44 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 20:09:44 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 20:09:44 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 20:09:44 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:09:44 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 20:09:44 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-07 20:09:44 DEBUG WPS2SM:93 - WPS type: +2016-04-07 20:09:44 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 20:09:44 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 20:09:44 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-07 20:09:44 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-07 20:09:44 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:09:44 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-07 20:09:44 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 20:09:44 DEBUG WPS2SM:93 - WPS type: +2016-04-07 20:09:44 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 20:09:44 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-07 20:09:44 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-07 20:09:44 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 20:09:44 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:09:44 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-07 20:09:44 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-07 20:09:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 20:09:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 20:09:45 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 20:09:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 20:09:45 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 20:09:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 20:09:45 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 20:09:45 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 20:09:45 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 20:09:45 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 20:09:45 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 20:09:45 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-07 20:09:45 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 20:09:45 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-07 20:09:45 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 20:09:45 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-07 20:09:45 DEBUG JCRRepository:271 - Initialize repository +2016-04-07 20:09:45 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-07 20:09:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-07 20:09:45 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 20:09:45 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-07 20:09:45 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 21 ms +2016-04-07 20:09:45 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-07 20:09:45 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 35 +2016-04-07 20:09:45 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 20:09:45 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-07 20:09:45 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 20:09:45 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-07 20:09:45 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 20:09:45 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 20:09:45 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 20:09:45 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-07 20:09:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 20:09:45 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 20:09:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 20:09:45 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 20:09:45 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 20:09:45 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 20:09:45 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 20:09:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 20:09:45 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 20:09:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 20:09:45 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 20:09:45 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 20:09:45 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 20:09:45 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 20:09:45 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 20:09:45 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 20:09:45 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-07 20:09:45 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-07 20:09:45 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-07 20:09:45 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-07 20:09:45 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-07 20:09:45 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 20:09:45 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 20:09:45 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 20:09:45 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 20:09:45 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 20:09:45 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 20:09:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:09:45 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 20:09:45 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 20:09:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:09:45 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 20:09:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:09:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-07 20:09:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:09:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:09:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 20:09:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:09:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-07 20:09:45 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-07 20:09:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:09:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 20:09:45 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 20:09:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:09:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:09:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:09:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:09:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:09:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:09:45 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 20:09:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 20:09:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 
20:09:46 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 20:09:46 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 20:09:46 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 28 ms +2016-04-07 20:09:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 20:09:46 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 20:09:46 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-07 20:09:46 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-07 20:09:46 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-07 20:09:46 INFO ISClientConnector:82 - found only one RR, take it +2016-04-07 20:09:46 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-07 20:09:46 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-07 20:09:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 20:09:46 DEBUG StorageClient:517 - set scope: /gcube +2016-04-07 20:09:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 20:09:46 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-07 20:09:46 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-07 20:09:46 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-07 20:09:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 20:09:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 20:09:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-07 20:09:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 20:09:46 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 20:09:46 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 20:09:46 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 20:09:46 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-07 20:09:46 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 20:09:46 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 20:09:46 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 20:09:46 INFO WorkspaceExplorerServiceImpl:142 - end time - 436 msc 0 sec +2016-04-07 20:09:46 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 20:10:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-07 20:10:05 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-07 20:10:05 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-07 20:10:05 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-07 20:10:05 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 20:10:05 INFO StatWPSClientSession:84 - CONNECT +2016-04-07 20:10:05 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-07 20:10:05 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-07 20:10:05 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-07 20:10:05 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-07 20:10:05 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-07 20:10:05 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-07 20:10:05 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-07 20:10:05 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-07 20:10:05 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-07 20:10:05 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-07 20:10:05 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-07 20:10:05 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-07 20:10:05 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-07 20:10:05 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-07 20:10:05 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-07 20:10:05 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-07 20:10:05 DEBUG WPS2SM:201 - Schema: null +2016-04-07 20:10:05 DEBUG WPS2SM:202 - Encoding: null +2016-04-07 20:10:05 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-07 20:10:05 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-07 20:10:05 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:10:05 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-07 20:10:05 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-07 20:10:05 DEBUG WPS2SM:93 - WPS type: +2016-04-07 20:10:05 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 20:10:05 DEBUG WPS2SM:101 - Guessed default value: +2016-04-07 20:10:05 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-07 20:10:05 DEBUG WPS2SM:112 - Machter find: true +2016-04-07 20:10:05 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-07 20:10:05 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-07 20:10:05 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-07 20:10:05 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-07 20:10:05 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-07 20:10:05 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-07 20:10:05 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-07 20:10:05 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-07 20:10:05 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:10:05 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-07 20:10:05 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-07 20:10:05 DEBUG WPS2SM:93 - WPS type: +2016-04-07 20:10:05 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-07 20:10:05 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-07 20:10:05 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-07 20:10:05 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-07 20:10:05 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:10:05 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-07 20:10:05 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-07 20:10:05 DEBUG WPS2SM:93 - WPS type: +2016-04-07 20:10:05 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 20:10:05 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-07 20:10:05 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-07 20:10:05 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-07 20:10:05 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:10:05 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-07 20:10:05 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-07 20:10:05 DEBUG WPS2SM:93 - WPS type: +2016-04-07 20:10:05 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 20:10:05 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-07 20:10:05 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-07 20:10:05 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-07 20:10:05 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:10:05 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-07 20:10:05 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-07 20:10:05 DEBUG WPS2SM:93 - WPS type: +2016-04-07 20:10:05 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 20:10:05 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-07 20:10:05 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-07 20:10:05 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-07 20:10:05 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:10:05 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-07 20:10:05 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-07 20:10:05 DEBUG WPS2SM:93 - WPS type: +2016-04-07 20:10:05 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-07 20:10:05 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-07 20:10:05 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-07 20:10:05 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-07 20:10:05 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-07 20:10:05 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-07 20:10:05 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-07 20:10:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 20:10:05 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 20:10:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 20:10:05 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 20:10:05 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 20:10:05 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 20:10:05 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 20:10:05 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 20:10:05 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 20:10:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:10:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 20:10:05 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-07 20:10:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-07 20:10:05 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 20:10:05 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 20:10:05 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 20:10:05 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 20:10:05 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 20:10:05 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 20:10:05 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-07 20:10:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:10:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-07 20:10:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 20:10:05 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 20:10:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 20:10:05 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 20:10:05 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 20:10:05 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 20:10:05 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 20:10:05 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 20:10:05 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 20:10:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:10:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 20:10:05 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 20:10:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 20:10:05 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-07 20:10:05 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-07 20:10:05 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-07 20:10:05 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-07 20:10:05 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-07 20:10:05 DEBUG JCRHomeManager:97 - User is already logged +2016-04-07 20:10:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:10:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-07 20:10:05 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-07 20:10:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:10:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:10:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:10:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath 
/Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:10:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:10:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:10:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:10:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 20:10:05 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-07 20:10:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-07 20:10:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:10:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-07 20:10:05 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-07 20:10:05 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-07 20:10:05 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-07 20:10:05 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-07 20:10:05 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 20:10:05 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-07 20:10:05 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-07 20:10:05 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-07 20:10:05 INFO WorkspaceExplorerServiceImpl:142 - end time - 233 msc 0 sec +2016-04-07 20:10:05 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-07 20:10:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 20:10:31 DEBUG ASLSession:458 - Getting security token: null in thread 33 
+2016-04-07 20:11:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-07 20:11:26 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-07 20:12:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-07 20:12:21 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 11:15:01 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 11:15:01 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 11:15:01 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-08 11:15:02 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 11:15:02 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 11:15:02 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 11:15:02 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 11:15:02 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@228a7a61 +2016-04-08 11:15:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 11:15:02 INFO ASLSession:352 - Logging the entrance +2016-04-08 11:15:02 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 11:15:02 DEBUG TemplateModel:83 - 2016-04-08 11:15:02, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 11:15:02 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 11:15:02 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 11:15:07 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 11:15:07 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 11:15:07 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 11:15:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:15:07 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 11:15:07 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 11:15:07 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 11:15:07 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 121 ms +2016-04-08 
11:15:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 11:15:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 11:15:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 11:15:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 11:15:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 11:15:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 11:15:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 11:15:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 11:15:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 11:15:07 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 11:15:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 11:15:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 11:15:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 11:15:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 11:15:07 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 11:15:07 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 11:15:08 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 11:15:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@283161f6 +2016-04-08 11:15:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@1d8bb8b0 +2016-04-08 11:15:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@4e470ea6 +2016-04-08 11:15:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@503f6b50 +2016-04-08 11:15:08 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 181 ms +2016-04-08 11:15:08 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 11:15:08 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 11:15:08 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 11:15:08 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 11:15:08 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 11:15:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:15:08 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 11:15:08 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 11:15:08 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 25 ms +2016-04-08 11:15:08 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 11:15:08 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 11:15:08 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 11:15:08 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 11:15:09 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 11:15:09 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 11:15:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 11:15:15 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 11:15:15 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 11:15:15 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 11:15:15 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 11:15:15 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 11:15:15 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 11:15:15 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 11:15:15 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 11:15:15 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 11:15:15 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 11:15:15 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 11:15:15 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 11:15:15 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 11:15:15 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 11:15:15 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 11:15:15 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 11:15:15 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 11:15:15 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 11:15:15 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 11:15:15 DEBUG WPS2SM:201 - Schema: null +2016-04-08 11:15:15 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 11:15:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 11:15:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 11:15:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:15:15 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 11:15:15 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 11:15:15 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:15:15 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 11:15:15 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 11:15:15 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 11:15:15 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 11:15:15 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 11:15:15 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 11:15:15 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 11:15:15 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 11:15:15 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 11:15:15 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 11:15:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 11:15:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 11:15:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:15:15 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 11:15:15 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 11:15:15 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:15:15 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 11:15:15 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 11:15:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 11:15:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 11:15:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:15:15 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 11:15:15 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 11:15:15 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:15:15 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 11:15:15 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 11:15:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 11:15:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 11:15:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:15:15 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 11:15:15 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 11:15:15 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:15:15 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 11:15:15 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 11:15:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 11:15:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 11:15:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:15:15 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 11:15:15 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 11:15:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 11:15:15 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 11:15:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:15:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 11:15:15 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 11:15:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:15:15 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:15:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:15:15 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:15:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:15:16 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 11:15:16 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 11:15:16 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 11:15:16 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 11:15:16 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 11:15:16 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 11:15:16 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 11:15:16 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 11:15:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 11:15:16 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 11:15:16 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 11:15:16 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 35 ms +2016-04-08 11:15:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 11:15:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in 
thread 31 +2016-04-08 11:15:16 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 11:15:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 11:15:16 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:15:16 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 11:15:16 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:15:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 11:15:16 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 11:15:16 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:15:16 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 11:15:16 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:15:16 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 11:15:16 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 11:15:16 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 11:15:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:15:16 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:15:16 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 11:15:16 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 11:15:16 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:15:16 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:15:16 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. 
+2016-04-08 11:15:16 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:15:16 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:15:16 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 11:15:16 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:15:16 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:15:16 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:15:16 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:15:16 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 11:15:17 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 11:15:17 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 11:15:17 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 11:15:17 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 11:15:17 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 11:15:17 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 11:15:17 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 11:15:17 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:15:17 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:15:17 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:15:17 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:15:17 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:15:17 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:15:17 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:15:17 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:15:17 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:15:17 INFO JCRWorkspace:315 - Getting Workspace of 
user: giancarlo.panichi +2016-04-08 11:15:17 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 11:15:17 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:15:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 11:15:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:15:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:15:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:15:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 11:15:17 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 11:15:17 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:15:17 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:15:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 11:15:17 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 11:15:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:15:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:15:17 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:15:17 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:15:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:15:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath 
/Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:15:17 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 11:15:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 11:15:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 11:15:17 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 11:15:17 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 11:15:17 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 35 ms +2016-04-08 11:15:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 11:15:17 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 11:15:17 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 11:15:17 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 28 ms +2016-04-08 11:15:17 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-08 11:15:17 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 11:15:17 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 11:15:17 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 11:15:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 11:15:17 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 11:15:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 11:15:17 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 11:15:17 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 11:15:17 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-08 11:15:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 11:15:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 11:15:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 11:15:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in 
thread 33 +2016-04-08 11:15:17 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 11:15:17 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 11:15:17 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 11:15:17 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 11:15:17 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 11:15:17 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 11:15:17 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 11:15:17 INFO WorkspaceExplorerServiceImpl:142 - end time - 489 msc 0 sec +2016-04-08 11:15:17 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 11:15:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 11:15:55 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 11:15:55 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 11:15:55 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-08 11:15:55 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 11:15:55 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 11:15:56 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 11:15:56 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 11:15:56 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 11:15:56 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 11:15:56 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 11:15:56 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-08 11:15:56 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-08 11:15:56 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-08 11:15:56 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-08 11:15:56 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 11:15:56 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 11:15:56 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 11:15:56 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 11:15:56 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 11:15:56 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 11:15:56 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 11:15:56 DEBUG WPS2SM:201 - Schema: null +2016-04-08 11:15:56 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 11:15:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 11:15:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 11:15:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:15:56 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 11:15:56 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 11:15:56 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:15:56 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 11:15:56 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 11:15:56 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 11:15:56 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 11:15:56 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 11:15:56 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 11:15:56 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 11:15:56 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 11:15:56 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 11:15:56 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 11:15:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 11:15:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 11:15:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:15:56 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 11:15:56 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 11:15:56 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:15:56 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 11:15:56 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 11:15:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 11:15:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 11:15:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:15:56 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 11:15:56 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-08 11:15:56 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:15:56 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 11:15:56 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-08 11:15:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-08 11:15:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-08 11:15:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:15:56 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-08 11:15:56 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-08 11:15:56 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:15:56 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 11:15:56 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 11:15:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-08 11:15:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-08 11:15:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:15:56 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 11:15:56 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-08 11:15:56 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:15:56 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 11:15:56 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-08 11:15:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-08 11:15:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-08 11:15:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:15:56 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-08 11:15:56 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 11:15:56 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:15:56 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 11:15:56 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 11:15:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-08 11:15:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 11:15:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:15:56 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 11:15:56 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 11:15:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:15:56 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 11:15:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:15:56 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:15:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:15:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:15:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:15:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:15:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:15:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:15:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 11:15:56 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 11:15:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 11:15:56 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:15:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:15:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:15:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:15:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:15:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:15:56 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 11:15:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:15:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 11:15:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:15:56 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 11:15:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:15:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:15:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:15:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 11:15:56 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 11:15:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 11:15:56 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:15:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:15:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:15:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:15:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 11:15:56 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 11:15:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 11:15:56 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:15:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:15:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:15:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:15:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:15:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:15:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:15:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:15:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:15:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:15:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:15:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:15:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:15:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 11:15:56 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 11:15:56 
INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:15:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:15:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:15:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:15:56 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 11:15:56 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 11:15:56 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 11:15:56 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 11:15:56 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 11:15:56 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 11:15:56 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 11:15:56 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 11:15:56 INFO WorkspaceExplorerServiceImpl:142 - end time - 238 msc 0 sec +2016-04-08 11:15:56 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 11:15:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 11:15:57 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 11:16:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:16:00 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 11:16:00 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 11:16:00 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF +2016-04-08 11:16:00 DEBUG SClient4WPS:263 - Describe Process WPS URL: 
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 11:16:00 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 11:16:01 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF + LOF + Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed. + + + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + + + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + + + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + + + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. 
maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + + + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + + + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 11:16:01 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 11:16:01 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 11:16:01 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + +2016-04-08 11:16:01 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + +2016-04-08 11:16:01 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + +2016-04-08 11:16:01 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + +2016-04-08 11:16:01 DEBUG SClient4WPS:290 - WPSClient->Input: + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + +2016-04-08 11:16:01 DEBUG SClient4WPS:290 - WPSClient->Input: + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. 
the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + +2016-04-08 11:16:01 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 11:16:01 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 11:16:01 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 11:16:01 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 11:16:01 DEBUG WPS2SM:279 - Conversion to SM Type->PointsTable is a Complex Input +2016-04-08 11:16:01 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 11:16:01 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 11:16:01 DEBUG WPS2SM:201 - Schema: null +2016-04-08 11:16:01 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 11:16:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 11:16:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsTable +2016-04-08 11:16:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:16:01 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 11:16:01 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 11:16:01 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:16:01 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 11:16:01 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 11:16:01 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-08 11:16:01 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 11:16:01 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from PointsTable separated by | +2016-04-08 11:16:01 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 11:16:01 DEBUG WPS2SM:115 - Machter end: 93 +2016-04-08 11:16:01 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 11:16:01 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: PointsTable +2016-04-08 11:16:01 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 11:16:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from PointsTable separated by | ] +2016-04-08 11:16:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 11:16:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:16:01 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 11:16:01 DEBUG WPS2SM:254 - Conversion to SM Type->PointsClusterLabel is a Literal Input +2016-04-08 11:16:01 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:16:01 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 11:16:01 DEBUG WPS2SM:101 - Guessed default value: Cluster_ +2016-04-08 11:16:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 11:16:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsClusterLabel +2016-04-08 11:16:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:16:01 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT] +2016-04-08 11:16:01 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_lower_bound is a Literal Input +2016-04-08 11:16:01 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:16:01 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 11:16:01 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 11:16:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:locality (usually called k): minimal number of nearest neighbors +2016-04-08 11:16:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_lower_bound +2016-04-08 11:16:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:16:01 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. of Entries:1; Max N. 
of Entries:1; default:2], typology=OBJECT] +2016-04-08 11:16:01 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_upper_bound is a Literal Input +2016-04-08 11:16:01 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:16:01 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 11:16:01 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 11:16:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of nearest neighbors to take into account for outliers evaluation +2016-04-08 11:16:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_upper_bound +2016-04-08 11:16:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:16:01 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 11:16:01 DEBUG WPS2SM:254 - Conversion to SM Type->distance_function is a Literal Input +2016-04-08 11:16:01 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:16:01 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 11:16:01 DEBUG WPS2SM:101 - Guessed default value: euclidian distance +2016-04-08 11:16:01 DEBUG WPS2SM:265 - ValueType[]:[euclidian distance, squared distance, cosine distance, inverted cosine distance, angle] +2016-04-08 11:16:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the distance function to use in the calculation +2016-04-08 11:16:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:distance_function +2016-04-08 11:16:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:16:01 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. 
of Entries:1; default:euclidian distance], typology=ENUM] +2016-04-08 11:16:01 DEBUG WPS2SM:254 - Conversion to SM Type->lof_threshold is a Literal Input +2016-04-08 11:16:01 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:16:01 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 11:16:01 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 11:16:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the LOF score threshold over which the point is an outlier (usually 2) +2016-04-08 11:16:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:lof_threshold +2016-04-08 11:16:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:16:01 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 11:16:01 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. 
of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. of Entries:1; default:euclidian distance], typology=ENUM], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 11:16:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 11:16:01 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 11:16:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 11:16:01 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:16:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:16:01 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:16:01 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:16:01 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:16:01 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:16:01 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:16:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 11:16:01 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 11:16:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 11:16:01 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:16:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:16:01 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:16:01 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:16:01 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:16:01 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:16:01 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 11:16:01 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:16:01 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 11:16:01 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:16:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 11:16:01 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 11:16:01 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:16:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 11:16:01 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 11:16:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 11:16:01 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:16:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:16:01 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:16:01 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:16:01 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:16:01 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:16:01 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:16:01 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:16:01 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:16:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 11:16:01 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 11:16:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 11:16:01 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:16:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:16:01 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:16:01 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:16:01 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:16:01 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:16:01 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:16:01 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:16:01 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:16:01 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:16:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 11:16:01 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 11:16:01 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:16:01 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:16:01 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:16:01 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 11:16:01 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 11:16:01 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 11:16:01 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 11:16:01 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 11:16:01 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 11:16:01 DEBUG ItemBuilder:108 - Is shared folder: 
StatisticalAlgorithmsImporter +2016-04-08 11:16:01 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 11:16:01 INFO WorkspaceExplorerServiceImpl:142 - end time - 193 msc 0 sec +2016-04-08 11:16:01 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 11:16:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 11:16:52 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 11:17:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 11:17:47 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 11:18:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 11:18:42 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 11:19:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 11:19:37 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 11:30:53 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 11:30:53 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 11:30:53 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-08 11:30:53 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 11:30:53 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 11:30:53 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 11:30:53 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 11:30:53 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@2195dd70 +2016-04-08 11:30:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 11:30:53 INFO ASLSession:352 - Logging the entrance +2016-04-08 11:30:53 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 11:30:53 DEBUG TemplateModel:83 - 2016-04-08 11:30:53, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 11:30:53 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 11:30:53 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 11:30:57 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 11:30:57 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 11:30:57 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 11:30:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 11:30:57 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 11:30:57 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 11:30:57 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 11:30:57 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 114 ms +2016-04-08 11:30:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 11:30:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 11:30:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 11:30:57 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 11:30:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 11:30:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 11:30:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 11:30:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 11:30:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 11:30:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 11:30:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 11:30:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 11:30:57 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 11:30:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 11:30:57 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 11:30:57 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 11:30:57 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 11:30:57 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@53f5ca40 +2016-04-08 11:30:57 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@1e9f6138 +2016-04-08 11:30:57 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@1ff48776 +2016-04-08 11:30:57 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@4449920 +2016-04-08 11:30:57 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 126 ms +2016-04-08 11:30:57 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 11:30:57 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 11:30:57 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 11:30:57 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 11:30:57 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 11:30:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 11:30:57 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 11:30:57 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 11:30:57 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 32 ms +2016-04-08 11:30:57 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 11:30:57 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 11:30:57 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 11:30:57 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 11:30:58 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 11:30:58 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 11:31:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 11:31:02 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 11:31:02 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 11:31:02 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 11:31:02 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 11:31:02 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 11:31:03 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 11:31:03 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 11:31:03 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 11:31:03 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 11:31:03 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 11:31:03 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 11:31:03 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 11:31:03 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 11:31:03 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 11:31:03 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 11:31:03 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 11:31:03 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 11:31:03 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 11:31:03 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 11:31:03 DEBUG WPS2SM:201 - Schema: null +2016-04-08 11:31:03 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 11:31:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 11:31:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 11:31:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:31:03 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 11:31:03 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 11:31:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:31:03 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 11:31:03 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 11:31:03 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 11:31:03 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 11:31:03 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 11:31:03 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 11:31:03 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 11:31:03 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 11:31:03 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 11:31:03 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 11:31:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 11:31:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 11:31:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:31:03 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 11:31:03 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 11:31:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:31:03 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 11:31:03 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 11:31:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 11:31:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 11:31:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:31:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 11:31:03 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 11:31:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:31:03 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 11:31:03 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 11:31:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 11:31:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 11:31:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:31:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 11:31:03 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 11:31:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:31:03 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 11:31:03 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 11:31:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 11:31:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 11:31:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:31:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 11:31:03 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 11:31:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 11:31:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:31:03 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 11:31:03 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 11:31:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:31:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 11:31:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:31:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:31:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:31:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:31:03 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 11:31:03 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 11:31:03 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 11:31:03 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 11:31:03 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 11:31:03 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 11:31:03 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 11:31:03 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 11:31:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 11:31:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 11:31:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 11:31:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 19 ms +2016-04-08 11:31:03 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 11:31:03 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:31:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:31:03 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 11:31:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:31:03 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 11:31:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:31:03 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 11:31:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:31:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:31:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 11:31:03 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 11:31:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 11:31:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:31:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:31:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:31:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:31:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 11:31:03 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 11:31:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 11:31:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:31:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:31:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:31:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:31:03 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 11:31:03 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 11:31:03 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 11:31:03 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 11:31:03 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 11:31:03 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 11:31:03 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 11:31:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:31:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:31:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:31:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:31:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:31:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:31:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:31:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:31:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:31:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:31:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:31:03 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 11:31:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 11:31:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:31:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:31:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 11:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 11:31:04 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 11:31:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 11:31:04 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 11:31:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:31:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:31:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:31:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:31:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:31:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:31:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:31:04 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 11:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 11:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 
11:31:04 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 11:31:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 11:31:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 29 ms +2016-04-08 11:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 11:31:04 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 11:31:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 11:31:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-08 11:31:04 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 11:31:04 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 11:31:04 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 11:31:04 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 11:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 11:31:04 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 11:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 11:31:04 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 11:31:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 11:31:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 11:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 11:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 11:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 11:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 11:31:04 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 11:31:04 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 11:31:04 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 11:31:04 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 11:31:04 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 11:31:04 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 11:31:04 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 11:31:04 INFO WorkspaceExplorerServiceImpl:142 - end time - 451 msc 0 sec +2016-04-08 11:31:04 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 11:31:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:31:48 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 11:32:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 11:32:43 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 11:33:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 11:33:38 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 11:49:28 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 11:49:28 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 11:49:28 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-08 11:49:28 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 11:49:28 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 11:49:28 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 11:49:28 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 11:49:28 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@2a01a385 +2016-04-08 11:49:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:49:28 INFO ASLSession:352 - Logging the entrance +2016-04-08 11:49:28 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 11:49:28 DEBUG TemplateModel:83 - 2016-04-08 11:49:28, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 11:49:28 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 11:49:28 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 11:49:32 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 11:49:32 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 11:49:32 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 11:49:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:49:32 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 11:49:32 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 11:49:32 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 11:49:32 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 134 ms +2016-04-08 11:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 11:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 11:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 11:49:32 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 11:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 11:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 11:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 11:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 11:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 11:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 11:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 11:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 11:49:32 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 11:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 11:49:32 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 11:49:32 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 11:49:32 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 11:49:32 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@28fff902 +2016-04-08 11:49:32 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@1cc52ec6 +2016-04-08 11:49:32 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@2af5b673 +2016-04-08 11:49:32 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@4eca1d1f +2016-04-08 11:49:32 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 112 ms +2016-04-08 11:49:32 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 11:49:33 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 11:49:33 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 11:49:33 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 11:49:33 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 11:49:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:49:33 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 11:49:33 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 11:49:33 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 28 ms +2016-04-08 11:49:33 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 11:49:33 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 11:49:33 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 11:49:33 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 11:49:33 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 11:49:33 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 11:49:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:49:37 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 11:49:37 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 11:49:37 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 11:49:37 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 11:49:37 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 11:49:37 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 11:49:37 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 11:49:37 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 11:49:37 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 11:49:37 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 11:49:37 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 11:49:37 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 11:49:37 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 11:49:37 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 11:49:37 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 11:49:37 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 11:49:37 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 11:49:37 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 11:49:37 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 11:49:37 DEBUG WPS2SM:201 - Schema: null +2016-04-08 11:49:37 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 11:49:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 11:49:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 11:49:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:49:37 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 11:49:37 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 11:49:37 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:49:37 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 11:49:37 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 11:49:37 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 11:49:37 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 11:49:37 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 11:49:37 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 11:49:37 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 11:49:37 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 11:49:37 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 11:49:37 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 11:49:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 11:49:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 11:49:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:49:37 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 11:49:37 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 11:49:37 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:49:37 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 11:49:37 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 11:49:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 11:49:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 11:49:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:49:37 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 11:49:37 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 11:49:37 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:49:37 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 11:49:37 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 11:49:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 11:49:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 11:49:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:49:37 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 11:49:37 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 11:49:37 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:49:37 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 11:49:37 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 11:49:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 11:49:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 11:49:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:49:37 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 11:49:37 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 11:49:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 11:49:37 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 11:49:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 11:49:37 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:49:37 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:49:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 11:49:37 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 11:49:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 11:49:37 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:49:37 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:49:38 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 11:49:38 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 11:49:38 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 11:49:38 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 11:49:38 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 11:49:38 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 11:49:38 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 11:49:38 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 11:49:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 11:49:38 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 11:49:38 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 11:49:38 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 23 ms +2016-04-08 11:49:38 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 11:49:38 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 33 +2016-04-08 11:49:38 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:49:38 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 11:49:38 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:49:38 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 11:49:38 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:49:38 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:49:38 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:49:38 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 11:49:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 11:49:38 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 11:49:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 11:49:38 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:49:38 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:49:38 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:49:38 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:49:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 11:49:38 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 11:49:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 11:49:38 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:49:38 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:49:38 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:49:38 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:49:38 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 11:49:38 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 11:49:38 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 11:49:38 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 11:49:38 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 11:49:38 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 11:49:38 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 11:49:38 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:49:38 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:49:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:49:38 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:49:38 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:49:38 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:49:38 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:49:38 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:49:38 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:49:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:49:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:49:38 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 11:49:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 11:49:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:49:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 11:49:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:49:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:49:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 11:49:38 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 11:49:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:49:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 11:49:38 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 11:49:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:49:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:49:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:49:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:49:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:49:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:49:38 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 11:49:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 11:49:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 
11:49:39 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 11:49:39 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 11:49:39 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 31 ms +2016-04-08 11:49:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 11:49:39 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 11:49:39 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 11:49:39 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-08 11:49:39 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 11:49:39 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 11:49:39 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 11:49:39 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 11:49:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 11:49:39 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 11:49:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 11:49:39 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 11:49:39 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 11:49:39 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 11:49:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 11:49:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 11:49:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 11:49:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 11:49:39 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 11:49:39 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 11:49:39 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 11:49:39 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 11:49:39 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 11:49:39 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 11:49:39 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 11:49:39 INFO WorkspaceExplorerServiceImpl:142 - end time - 458 msc 0 sec +2016-04-08 11:49:39 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 11:50:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 11:50:23 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 11:51:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 11:51:18 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 11:52:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:52:13 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 11:52:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 11:52:34 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 11:52:34 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 11:52:34 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-08 11:52:34 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 11:52:34 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 11:52:35 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. 
a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 11:52:35 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 11:52:35 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 11:52:35 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 11:52:35 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 11:52:35 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-08 11:52:35 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-08 11:52:35 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-08 11:52:35 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-08 11:52:35 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 11:52:35 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 11:52:35 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 11:52:35 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 11:52:35 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 11:52:35 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 11:52:35 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 11:52:35 DEBUG WPS2SM:201 - Schema: null +2016-04-08 11:52:35 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 11:52:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 11:52:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 11:52:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:52:35 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 11:52:35 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 11:52:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:52:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 11:52:35 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 11:52:35 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 11:52:35 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 11:52:35 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 11:52:35 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 11:52:35 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 11:52:35 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 11:52:35 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 11:52:35 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 11:52:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 11:52:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 11:52:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:52:35 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 11:52:35 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 11:52:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:52:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 11:52:35 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 11:52:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 11:52:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 11:52:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:52:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 11:52:35 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-08 11:52:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:52:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 11:52:35 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-08 11:52:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-08 11:52:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-08 11:52:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:52:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-08 11:52:35 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-08 11:52:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:52:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 11:52:35 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 11:52:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-08 11:52:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-08 11:52:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:52:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 11:52:35 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-08 11:52:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:52:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 11:52:35 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-08 11:52:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-08 11:52:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-08 11:52:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:52:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-08 11:52:35 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 11:52:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:52:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 11:52:35 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 11:52:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-08 11:52:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 11:52:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:52:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 11:52:35 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 11:52:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 11:52:35 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 11:52:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 11:52:35 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:52:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:52:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:52:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:52:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:52:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:52:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:52:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 11:52:35 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 11:52:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 11:52:35 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:52:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:52:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:52:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:52:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:52:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:52:35 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 11:52:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:52:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 11:52:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 11:52:35 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 11:52:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:52:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:52:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:52:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 11:52:35 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 11:52:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 11:52:35 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:52:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:52:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:52:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:52:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:52:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:52:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:52:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:52:35 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 11:52:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:52:35 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:52:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:52:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:52:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:52:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:52:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:52:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:52:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:52:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:52:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:52:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:52:35 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 11:52:35 
INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:52:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:52:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:52:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:52:35 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 11:52:35 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 11:52:35 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 11:52:35 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 11:52:35 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 11:52:35 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 11:52:35 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 11:52:35 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 11:52:35 INFO WorkspaceExplorerServiceImpl:142 - end time - 229 msc 0 sec +2016-04-08 11:52:35 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 11:52:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 11:52:41 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 11:52:41 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 11:52:41 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF +2016-04-08 11:52:41 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 11:52:41 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 11:52:41 DEBUG SClient4WPS:284 - + 
org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF + LOF + Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed. + + + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + + + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + + + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + + + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + + + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. 
the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + + + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 11:52:41 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 11:52:41 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 11:52:41 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. 
column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + +2016-04-08 11:52:41 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + +2016-04-08 11:52:41 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + +2016-04-08 11:52:41 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + +2016-04-08 11:52:41 DEBUG SClient4WPS:290 - WPSClient->Input: + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + +2016-04-08 11:52:41 DEBUG SClient4WPS:290 - WPSClient->Input: + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + +2016-04-08 11:52:41 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 11:52:41 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 11:52:41 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 11:52:41 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 11:52:41 DEBUG WPS2SM:279 - Conversion to SM Type->PointsTable is a Complex Input +2016-04-08 11:52:41 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 11:52:41 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 11:52:41 DEBUG WPS2SM:201 - Schema: null +2016-04-08 11:52:41 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 11:52:41 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 11:52:41 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsTable +2016-04-08 11:52:41 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:52:41 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-08 11:52:41 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 11:52:41 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:52:41 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 11:52:41 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 11:52:41 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-08 11:52:41 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 11:52:41 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from PointsTable separated by | +2016-04-08 11:52:41 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 11:52:41 DEBUG WPS2SM:115 - Machter end: 93 +2016-04-08 11:52:41 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 11:52:41 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: PointsTable +2016-04-08 11:52:41 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 11:52:41 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from PointsTable separated by | ] +2016-04-08 11:52:41 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 11:52:41 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:52:41 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 11:52:41 DEBUG WPS2SM:254 - Conversion to SM Type->PointsClusterLabel is a Literal Input +2016-04-08 11:52:41 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:52:41 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 11:52:41 DEBUG WPS2SM:101 - Guessed default value: Cluster_ +2016-04-08 11:52:41 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 11:52:41 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsClusterLabel +2016-04-08 11:52:41 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:52:41 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT] +2016-04-08 11:52:41 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_lower_bound is a Literal Input +2016-04-08 11:52:41 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:52:41 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 11:52:41 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 11:52:41 DEBUG WPS2SM:290 - Conversion to SM Type->Title:locality (usually called k): minimal number of nearest neighbors +2016-04-08 11:52:41 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_lower_bound +2016-04-08 11:52:41 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:52:41 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. of Entries:1; Max N. 
of Entries:1; default:2], typology=OBJECT] +2016-04-08 11:52:41 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_upper_bound is a Literal Input +2016-04-08 11:52:41 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:52:41 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 11:52:41 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 11:52:41 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of nearest neighbors to take into account for outliers evaluation +2016-04-08 11:52:41 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_upper_bound +2016-04-08 11:52:41 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:52:41 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 11:52:41 DEBUG WPS2SM:254 - Conversion to SM Type->distance_function is a Literal Input +2016-04-08 11:52:41 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:52:41 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 11:52:41 DEBUG WPS2SM:101 - Guessed default value: euclidian distance +2016-04-08 11:52:41 DEBUG WPS2SM:265 - ValueType[]:[euclidian distance, squared distance, cosine distance, inverted cosine distance, angle] +2016-04-08 11:52:41 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the distance function to use in the calculation +2016-04-08 11:52:41 DEBUG WPS2SM:291 - Conversion to SM Type->Name:distance_function +2016-04-08 11:52:41 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:52:41 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. 
of Entries:1; default:euclidian distance], typology=ENUM] +2016-04-08 11:52:41 DEBUG WPS2SM:254 - Conversion to SM Type->lof_threshold is a Literal Input +2016-04-08 11:52:42 DEBUG WPS2SM:93 - WPS type: +2016-04-08 11:52:42 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 11:52:42 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 11:52:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the LOF score threshold over which the point is an outlier (usually 2) +2016-04-08 11:52:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:lof_threshold +2016-04-08 11:52:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 11:52:42 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 11:52:42 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. 
of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. of Entries:1; default:euclidian distance], typology=ENUM], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 11:52:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 11:52:42 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 11:52:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 11:52:42 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:52:42 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:52:42 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:52:42 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:52:42 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:52:42 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:52:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:52:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 11:52:42 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 11:52:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 11:52:42 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:52:42 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:52:42 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:52:42 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:52:42 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:52:42 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:52:42 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 11:52:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:52:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 11:52:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 11:52:42 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 11:52:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:52:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:52:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:52:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:52:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 11:52:42 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 11:52:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 11:52:42 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:52:42 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:52:42 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:52:42 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:52:42 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:52:42 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:52:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:52:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:52:42 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 11:52:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:52:42 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 11:52:42 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 11:52:42 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 11:52:42 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 11:52:42 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 11:52:42 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 11:52:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:52:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:52:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:52:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:52:42 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 11:52:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 11:52:42 INFO JCRWorkspace:315 - Getting Workspace 
of user: giancarlo.panichi +2016-04-08 11:52:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:52:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 11:52:42 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 11:52:42 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 11:52:42 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 11:52:42 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 11:52:42 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 11:52:42 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 11:52:42 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 11:52:42 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 11:52:42 INFO WorkspaceExplorerServiceImpl:142 - end time - 189 msc 0 sec +2016-04-08 11:52:42 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 11:53:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 11:53:08 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 11:54:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 11:54:03 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 11:54:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 11:54:58 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:03:04 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 12:03:04 DEBUG AccessLogger:44 - Constructing a new access logger. 
Create a new file if it does not exist for the current date +2016-04-08 12:03:04 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-08 12:03:04 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 12:03:04 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 12:03:04 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 12:03:04 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 12:03:04 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@a12f050 +2016-04-08 12:03:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:03:04 INFO ASLSession:352 - Logging the entrance +2016-04-08 12:03:04 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 12:03:04 DEBUG TemplateModel:83 - 2016-04-08 12:03:04, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 12:03:04 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:03:04 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 12:03:07 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 12:03:07 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 12:03:07 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 12:03:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:03:07 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:03:07 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:03:07 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 12:03:07 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 110 ms +2016-04-08 12:03:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 12:03:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 12:03:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 12:03:07 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 12:03:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 12:03:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 12:03:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 12:03:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 12:03:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 12:03:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 12:03:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 12:03:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 12:03:07 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 12:03:07 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 12:03:07 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 12:03:07 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:03:07 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 12:03:07 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@5fc56614 +2016-04-08 12:03:07 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@5be651a5 +2016-04-08 12:03:07 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@142704b8 +2016-04-08 12:03:07 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@e14bf7c +2016-04-08 12:03:07 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 105 ms +2016-04-08 12:03:07 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 12:03:07 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 12:03:07 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 12:03:07 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 12:03:07 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 12:03:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:03:07 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:03:07 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 12:03:07 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-08 12:03:07 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 12:03:07 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:03:07 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 12:03:07 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 12:03:08 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:03:08 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 12:03:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:03:11 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 12:03:11 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:03:11 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 12:03:11 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:03:11 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 12:03:12 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 12:03:12 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 12:03:12 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 12:03:12 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 12:03:12 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 12:03:12 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 12:03:12 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 12:03:12 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 12:03:12 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 12:03:12 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 12:03:12 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:03:12 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 12:03:12 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 12:03:12 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 12:03:12 DEBUG WPS2SM:201 - Schema: null +2016-04-08 12:03:12 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 12:03:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 12:03:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 12:03:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:03:12 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 12:03:12 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 12:03:12 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:03:12 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:03:12 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 12:03:12 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 12:03:12 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 12:03:12 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 12:03:12 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 12:03:12 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 12:03:12 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 12:03:12 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 12:03:12 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 12:03:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 12:03:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 12:03:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:03:12 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 12:03:12 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 12:03:12 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:03:12 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:03:12 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 12:03:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 12:03:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 12:03:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:03:12 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 12:03:12 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 12:03:12 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:03:12 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:03:12 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 12:03:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 12:03:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 12:03:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:03:12 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 12:03:12 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 12:03:12 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:03:12 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:03:12 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 12:03:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 12:03:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 12:03:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:03:12 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 12:03:12 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 12:03:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:03:12 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:03:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:03:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:03:12 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:03:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:03:12 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:03:12 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:03:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:03:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:03:12 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 12:03:12 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 12:03:12 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 12:03:12 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 12:03:12 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 12:03:12 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 12:03:12 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 12:03:12 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 12:03:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 12:03:12 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:03:12 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 12:03:12 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 17 ms +2016-04-08 12:03:12 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is 
null?false +2016-04-08 12:03:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:03:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:03:12 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 12:03:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:03:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:03:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:03:12 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 12:03:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:03:12 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 12:03:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:03:12 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:03:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:03:12 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:03:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 12:03:12 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 12:03:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 12:03:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:03:12 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:03:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:03:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:03:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:03:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:03:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:03:12 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 12:03:13 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 12:03:13 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 12:03:13 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 12:03:13 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 12:03:13 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 12:03:13 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 12:03:13 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:03:13 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:03:13 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:03:13 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:03:13 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:03:13 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:03:13 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:03:13 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:03:13 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 12:03:13 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:03:13 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:03:13 INFO JCRWorkspace:315 - Getting Workspace of user: 
giancarlo.panichi +2016-04-08 12:03:13 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:03:13 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:03:13 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:03:13 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 12:03:13 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:03:13 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:03:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:03:13 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:03:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:03:13 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:03:13 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:03:13 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:03:13 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:03:13 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:03:13 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:03:13 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:03:13 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 12:03:13 DEBUG DefaultScopeProvider:38 - 
setting scope /gcube in thread 29 +2016-04-08 12:03:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 12:03:13 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:03:13 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 12:03:13 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 24 ms +2016-04-08 12:03:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 12:03:13 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:03:13 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 12:03:13 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-08 12:03:13 DEBUG StorageClient:316 - 
server not set. Try to query IS in scope: /gcube +2016-04-08 12:03:13 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 12:03:13 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 12:03:13 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 12:03:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 12:03:13 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 12:03:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 12:03:13 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:03:13 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 12:03:13 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 18 ms +2016-04-08 12:03:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 12:03:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 12:03:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 12:03:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 12:03:13 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 12:03:13 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 12:03:13 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 12:03:13 DEBUG 
ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 12:03:13 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 12:03:13 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 12:03:13 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 12:03:13 INFO WorkspaceExplorerServiceImpl:142 - end time - 467 msc 0 sec +2016-04-08 12:03:13 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 12:04:26 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 12:04:26 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 12:04:26 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-08 12:04:26 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 12:04:26 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 12:04:26 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 12:04:26 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 12:04:26 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@6f265e1d +2016-04-08 12:04:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:04:26 INFO ASLSession:352 - Logging the entrance +2016-04-08 12:04:26 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 12:04:26 DEBUG TemplateModel:83 - 2016-04-08 12:04:26, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 12:04:26 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:04:26 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 12:04:30 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 12:04:30 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 12:04:30 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 12:04:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:04:30 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:04:30 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:04:30 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 12:04:30 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 125 ms +2016-04-08 
12:04:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 12:04:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 12:04:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 12:04:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 12:04:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 12:04:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 12:04:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 12:04:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 12:04:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 12:04:30 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 12:04:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 12:04:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 12:04:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 12:04:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 12:04:30 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 12:04:30 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:04:30 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 12:04:30 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@53a8efb9 +2016-04-08 12:04:30 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@69275210 +2016-04-08 12:04:30 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@22a53b05 +2016-04-08 12:04:30 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@28e0d21a +2016-04-08 12:04:30 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 129 ms +2016-04-08 12:04:30 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 12:04:30 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 12:04:30 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 12:04:30 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 12:04:30 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 12:04:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:04:30 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:04:30 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 12:04:30 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 31 ms +2016-04-08 12:04:30 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 12:04:30 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:04:30 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 12:04:30 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 12:04:31 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:04:31 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 12:04:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:04:34 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 12:04:34 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:04:34 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 12:04:34 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:04:34 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 12:04:35 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 12:04:35 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 12:04:35 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 12:04:35 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 12:04:35 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 12:04:35 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 12:04:35 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 12:04:35 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 12:04:35 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 12:04:35 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 12:04:35 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:04:35 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 12:04:35 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 12:04:35 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 12:04:35 DEBUG WPS2SM:201 - Schema: null +2016-04-08 12:04:35 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 12:04:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 12:04:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 12:04:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:04:35 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 12:04:35 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 12:04:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:04:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:04:35 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 12:04:35 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 12:04:35 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 12:04:35 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 12:04:35 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 12:04:35 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 12:04:35 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 12:04:35 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 12:04:35 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 12:04:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 12:04:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 12:04:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:04:35 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 12:04:35 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 12:04:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:04:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:04:35 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 12:04:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 12:04:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 12:04:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:04:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 12:04:35 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 12:04:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:04:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:04:35 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 12:04:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 12:04:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 12:04:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:04:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 12:04:35 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 12:04:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:04:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:04:35 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 12:04:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 12:04:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 12:04:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:04:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 12:04:35 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 12:04:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:04:35 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:04:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:04:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:04:35 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 12:04:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:04:35 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:04:35 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:04:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:04:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:04:35 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 12:04:35 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 12:04:35 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 12:04:35 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 12:04:35 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 12:04:35 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 12:04:35 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 12:04:35 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 12:04:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 12:04:35 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:04:35 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 12:04:35 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 23 ms +2016-04-08 12:04:35 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is 
null?false +2016-04-08 12:04:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:04:35 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 12:04:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:04:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:04:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:04:35 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 12:04:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:04:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:04:35 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 12:04:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:04:35 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 12:04:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:04:35 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:04:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:04:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:04:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:04:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:04:35 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:04:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:04:35 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:04:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:04:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:04:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:04:35 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 12:04:35 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 12:04:35 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 12:04:35 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 12:04:35 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 12:04:35 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 12:04:35 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 12:04:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:04:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:04:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:04:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:04:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:04:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:04:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:04:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:04:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:04:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:04:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:04:35 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 12:04:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 12:04:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:04:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 12:04:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:04:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:04:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:04:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:04:36 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 12:04:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:04:36 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:04:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:04:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:04:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:04:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:04:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:04:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:04:36 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 12:04:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 12:04:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 
12:04:36 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:04:36 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 12:04:36 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 31 ms +2016-04-08 12:04:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 12:04:36 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:04:36 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 12:04:36 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 12:04:36 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 12:04:36 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 12:04:36 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 12:04:36 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 12:04:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 12:04:36 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 12:04:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 12:04:36 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:04:36 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 12:04:36 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 12:04:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 12:04:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 12:04:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 12:04:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:04:36 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 12:04:36 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 12:04:36 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 12:04:36 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 12:04:36 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 12:04:36 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 12:04:36 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 12:04:36 INFO WorkspaceExplorerServiceImpl:142 - end time - 438 msc 0 sec +2016-04-08 12:04:36 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 12:05:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:05:21 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:05:46 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 12:05:46 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 12:05:46 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-08 12:05:46 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 12:05:46 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 12:05:46 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:05:46 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 12:05:46 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@33d04854 +2016-04-08 12:05:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:05:46 INFO ASLSession:352 - Logging the entrance +2016-04-08 12:05:46 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 12:05:46 DEBUG TemplateModel:83 - 2016-04-08 12:05:46, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 12:05:46 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:05:46 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 12:05:49 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 12:05:49 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 12:05:49 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 12:05:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:05:49 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 12:05:49 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:05:49 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 12:05:49 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 129 ms +2016-04-08 
12:05:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 12:05:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 12:05:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 12:05:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 12:05:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 12:05:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 12:05:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 12:05:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 12:05:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 12:05:49 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 12:05:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 12:05:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 12:05:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 12:05:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 12:05:49 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 12:05:49 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:05:49 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 12:05:50 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@19df5bbb +2016-04-08 12:05:50 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@e985193 +2016-04-08 12:05:50 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@69e724b3 +2016-04-08 12:05:50 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@7fdcaff6 +2016-04-08 12:05:50 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 112 ms +2016-04-08 12:05:50 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 12:05:50 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 12:05:50 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 12:05:50 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 12:05:50 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 12:05:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:05:50 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:05:50 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 12:05:50 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-08 12:05:50 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 12:05:50 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:05:50 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 12:05:50 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 12:05:51 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:05:51 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 12:05:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:05:54 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 12:05:54 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:05:54 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 12:05:54 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:05:54 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 12:05:54 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 12:05:54 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 12:05:54 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 12:05:54 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 12:05:54 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 12:05:54 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 12:05:54 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 12:05:54 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 12:05:54 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 12:05:54 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 12:05:54 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:05:54 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 12:05:54 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 12:05:54 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 12:05:54 DEBUG WPS2SM:201 - Schema: null +2016-04-08 12:05:54 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 12:05:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 12:05:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 12:05:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:05:54 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 12:05:54 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 12:05:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:05:54 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:05:54 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 12:05:54 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 12:05:54 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 12:05:54 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 12:05:54 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 12:05:54 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 12:05:54 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 12:05:54 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 12:05:54 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 12:05:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 12:05:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 12:05:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:05:54 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 12:05:54 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 12:05:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:05:54 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:05:54 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 12:05:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 12:05:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 12:05:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:05:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 12:05:54 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 12:05:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:05:54 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:05:54 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 12:05:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 12:05:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 12:05:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:05:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 12:05:54 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 12:05:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:05:54 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:05:54 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 12:05:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 12:05:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 12:05:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:05:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 12:05:54 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 12:05:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:05:54 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:05:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:05:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:05:54 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:05:54 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 12:05:54 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:05:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:05:54 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:05:54 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:05:54 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 12:05:54 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 12:05:54 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 12:05:54 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 12:05:54 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 12:05:54 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 12:05:54 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 12:05:54 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 12:05:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 12:05:54 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:05:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 12:05:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-08 12:05:55 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 12:05:55 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:05:55 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:05:55 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 12:05:55 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:05:55 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:05:55 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 12:05:55 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:05:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:05:55 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 12:05:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:05:55 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:05:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:05:55 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:05:55 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:05:55 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:05:55 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:05:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:05:55 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:05:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:05:55 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:05:55 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:05:55 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:05:55 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:05:55 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 12:05:55 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 12:05:55 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 12:05:55 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 12:05:55 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 12:05:55 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 12:05:55 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 12:05:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:05:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:05:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:05:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:05:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:05:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:05:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:05:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:05:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:05:55 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 12:05:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:05:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:05:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by 
giancarlo.panichi +2016-04-08 12:05:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:05:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:05:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:05:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:05:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:05:55 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 12:05:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:05:55 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:05:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:05:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:05:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:05:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:05:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:05:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:05:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:05:55 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 12:05:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 12:05:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 
12:05:55 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:05:55 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 12:05:55 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 26 ms +2016-04-08 12:05:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 12:05:55 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:05:55 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 12:05:55 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-08 12:05:55 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 12:05:55 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 12:05:55 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 12:05:55 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 12:05:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 12:05:55 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 12:05:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 12:05:55 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:05:55 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 12:05:55 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-08 12:05:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 12:05:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 12:05:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 12:05:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:05:56 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 12:05:56 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 12:05:56 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 12:05:56 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 12:05:56 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 12:05:56 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 12:05:56 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 12:05:56 INFO WorkspaceExplorerServiceImpl:142 - end time - 478 msc 0 sec +2016-04-08 12:05:56 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 12:09:10 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 12:09:10 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 12:09:10 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-08 12:09:10 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 12:09:10 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 12:09:10 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:09:10 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 12:09:10 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@5ad11576 +2016-04-08 12:09:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:09:10 INFO ASLSession:352 - Logging the entrance +2016-04-08 12:09:10 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 12:09:10 DEBUG TemplateModel:83 - 2016-04-08 12:09:10, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 12:09:10 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:09:10 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 12:09:13 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 12:09:13 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 12:09:13 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 12:09:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:09:13 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 12:09:13 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:09:13 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 12:09:13 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 126 ms +2016-04-08 
12:09:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 12:09:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 12:09:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 12:09:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 12:09:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 12:09:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 12:09:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 12:09:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 12:09:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 12:09:13 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 12:09:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 12:09:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 12:09:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 12:09:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 12:09:13 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 12:09:13 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:09:13 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 12:09:13 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@6a8d3f0f +2016-04-08 12:09:13 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@a2c7966 +2016-04-08 12:09:13 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@f5dca1a +2016-04-08 12:09:13 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@c780239 +2016-04-08 12:09:13 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 102 ms +2016-04-08 12:09:13 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 12:09:13 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 12:09:13 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 12:09:13 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 12:09:13 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 12:09:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:09:13 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:09:13 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 12:09:13 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 25 ms +2016-04-08 12:09:13 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 12:09:13 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:09:13 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 12:09:13 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 12:09:14 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:09:14 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 12:09:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:09:19 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:09:19 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:09:19 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 12:09:19 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:09:19 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 12:09:19 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 12:09:19 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 12:09:19 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 12:09:19 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 12:09:19 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 12:09:19 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 12:09:19 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 12:09:19 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 12:09:19 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 12:09:19 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 12:09:19 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:09:19 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 12:09:19 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 12:09:19 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 12:09:19 DEBUG WPS2SM:201 - Schema: null +2016-04-08 12:09:19 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 12:09:19 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 12:09:19 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 12:09:19 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:09:19 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 12:09:19 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 12:09:19 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:09:19 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:09:19 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 12:09:19 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 12:09:19 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 12:09:19 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 12:09:19 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 12:09:19 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 12:09:19 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 12:09:19 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 12:09:19 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 12:09:19 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 12:09:19 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 12:09:19 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:09:19 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 12:09:19 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 12:09:19 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:09:19 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:09:19 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 12:09:19 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 12:09:19 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 12:09:19 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:09:19 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 12:09:19 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 12:09:19 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:09:19 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:09:19 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 12:09:19 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 12:09:19 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 12:09:19 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:09:19 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 12:09:19 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 12:09:19 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:09:19 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:09:19 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 12:09:19 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 12:09:19 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 12:09:19 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:09:19 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 12:09:19 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 12:09:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:09:19 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:09:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:09:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:09:19 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:09:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:09:19 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:09:19 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:09:19 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:09:19 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:09:20 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 12:09:20 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 12:09:20 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 12:09:20 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 12:09:20 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 12:09:20 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 12:09:20 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 12:09:20 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 12:09:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 12:09:20 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:09:20 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 12:09:20 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-08 12:09:20 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 12:09:20 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:09:20 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:09:20 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 12:09:20 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:09:20 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:09:20 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 12:09:20 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:09:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:09:20 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 12:09:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 12:09:20 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 12:09:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 12:09:20 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:09:20 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:09:20 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:09:20 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:09:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:09:20 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 12:09:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:09:20 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:09:20 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:09:20 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:09:20 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:09:20 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 12:09:20 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 12:09:20 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 12:09:20 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 12:09:20 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 12:09:20 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 12:09:20 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 12:09:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:09:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:09:20 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:09:20 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:09:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:09:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:09:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:09:20 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:09:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:09:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:09:20 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:09:20 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 12:09:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 12:09:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 12:09:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:09:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:09:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:09:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:09:20 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:09:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:09:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 12:09:20 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 12:09:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:09:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:09:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:09:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:09:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:09:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:09:20 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 12:09:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 12:09:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 
12:09:20 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:09:20 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 12:09:21 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 21 ms +2016-04-08 12:09:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 12:09:21 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:09:21 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 12:09:21 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 12:09:21 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 12:09:21 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 12:09:21 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 12:09:21 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 12:09:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 12:09:21 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 12:09:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 12:09:21 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:09:21 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 12:09:21 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-08 12:09:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 12:09:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 12:09:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 12:09:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:09:21 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 12:09:21 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 12:09:21 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 12:09:21 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 12:09:21 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 12:09:21 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 12:09:21 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 12:09:21 INFO WorkspaceExplorerServiceImpl:142 - end time - 430 msc 0 sec +2016-04-08 12:09:21 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 12:10:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:10:05 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:11:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 12:11:00 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 12:11:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:11:55 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 12:12:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:12:50 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:13:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:13:45 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:14:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:14:40 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:15:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:15:35 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:16:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:16:30 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 12:17:25 DEBUG 
DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:17:25 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 12:18:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:18:20 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:19:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:19:15 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:20:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:20:10 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:21:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:21:05 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:24:59 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 12:24:59 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 12:24:59 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-08 12:24:59 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 12:24:59 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 12:24:59 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 12:24:59 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 12:24:59 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@4672c7b7 +2016-04-08 12:24:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:24:59 INFO ASLSession:352 - Logging the entrance +2016-04-08 12:24:59 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 12:24:59 DEBUG TemplateModel:83 - 2016-04-08 12:24:59, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 12:24:59 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:24:59 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 12:25:02 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 12:25:02 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 12:25:02 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 12:25:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:25:02 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 12:25:02 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:25:02 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 12:25:02 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 130 ms +2016-04-08 
12:25:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 12:25:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 12:25:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 12:25:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 12:25:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 12:25:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 12:25:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 12:25:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 12:25:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 12:25:02 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 12:25:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 12:25:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 12:25:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 12:25:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 12:25:02 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 12:25:03 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:25:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 12:25:03 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@3078338d +2016-04-08 12:25:03 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@339790f9 +2016-04-08 12:25:03 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@52410ce5 +2016-04-08 12:25:03 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@5ba36576 +2016-04-08 12:25:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 102 ms +2016-04-08 12:25:03 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 12:25:03 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 12:25:03 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 12:25:03 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 12:25:03 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 12:25:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:25:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:25:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 12:25:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 30 ms +2016-04-08 12:25:03 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 12:25:03 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:25:03 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 12:25:03 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 12:25:04 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:25:04 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 12:25:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:25:07 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:25:07 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:25:07 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 12:25:07 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:25:07 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 12:25:07 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 12:25:07 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 12:25:07 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 12:25:07 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 12:25:07 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 12:25:07 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 12:25:07 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 12:25:07 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 12:25:07 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 12:25:07 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 12:25:07 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:25:07 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 12:25:07 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 12:25:07 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 12:25:07 DEBUG WPS2SM:201 - Schema: null +2016-04-08 12:25:07 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 12:25:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 12:25:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 12:25:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:25:07 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 12:25:07 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 12:25:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:25:07 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:25:07 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 12:25:07 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 12:25:07 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 12:25:07 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 12:25:07 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 12:25:07 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 12:25:07 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 12:25:07 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 12:25:07 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 12:25:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 12:25:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 12:25:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:25:07 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 12:25:07 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 12:25:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:25:07 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:25:07 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 12:25:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 12:25:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 12:25:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:25:07 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 12:25:07 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 12:25:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:25:07 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:25:07 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 12:25:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 12:25:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 12:25:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:25:07 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 12:25:07 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 12:25:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:25:07 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:25:07 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 12:25:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 12:25:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 12:25:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:25:07 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 12:25:07 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 12:25:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:25:07 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:25:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 12:25:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:25:07 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 12:25:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 12:25:07 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:25:07 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:25:07 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:25:07 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:25:08 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 12:25:08 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 12:25:08 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 12:25:08 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 12:25:08 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 12:25:08 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 12:25:08 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 12:25:08 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 12:25:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 12:25:08 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:25:08 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 12:25:08 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 76 ms +2016-04-08 12:25:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:25:08 DEBUG ASLSession:458 - Getting security token: null in thread 34 
+2016-04-08 12:25:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:25:08 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:25:08 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:25:08 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 12:25:08 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 12:25:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:25:08 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 12:25:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:25:08 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:25:08 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:25:08 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 12:25:08 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 12:25:08 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 12:25:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:25:08 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:25:08 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:25:08 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. 
+2016-04-08 12:25:08 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 12:25:08 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 12:25:08 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:25:08 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 12:25:08 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:25:08 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:25:08 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:25:08 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:25:08 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:25:08 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:25:08 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 12:25:08 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 12:25:08 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 12:25:08 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 12:25:08 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 12:25:08 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 12:25:08 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 12:25:08 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 12:25:08 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:25:08 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:25:08 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:25:08 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:25:08 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:25:08 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi 
+2016-04-08 12:25:08 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:25:08 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:25:08 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:25:08 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:25:08 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:25:08 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 12:25:08 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:25:08 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:25:08 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 12:25:08 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:25:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:25:08 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 12:25:08 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:25:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:25:08 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:25:08 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:25:08 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:25:08 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:25:08 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:25:09 INFO JCRServlets:238 - Calling Servlet 
GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:25:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:25:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:25:09 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 12:25:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 12:25:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:25:09 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:25:09 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 12:25:09 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 33 ms +2016-04-08 12:25:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 12:25:09 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:25:09 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where 
($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 12:25:09 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 12:25:09 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-08 12:25:09 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 12:25:09 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 12:25:09 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 12:25:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 12:25:09 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 12:25:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 12:25:09 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:25:09 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 12:25:09 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 12:25:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube in 
thread 31 +2016-04-08 12:25:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 12:25:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 12:25:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:25:09 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 12:25:09 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 12:25:09 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 12:25:09 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 12:25:09 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 12:25:09 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 12:25:09 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 12:25:09 INFO WorkspaceExplorerServiceImpl:142 - end time - 455 msc 0 sec +2016-04-08 12:25:09 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 12:25:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 12:25:54 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 12:26:52 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 12:26:52 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 12:26:52 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-08 12:26:52 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 12:26:52 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 12:26:52 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:26:52 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 12:26:52 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@29186429 +2016-04-08 12:26:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:26:52 INFO ASLSession:352 - Logging the entrance +2016-04-08 12:26:52 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 12:26:52 DEBUG TemplateModel:83 - 2016-04-08 12:26:52, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 12:26:52 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:26:52 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 12:26:55 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 12:26:55 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 12:26:55 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 12:26:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:26:55 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 12:26:55 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:26:55 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 12:26:55 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 126 ms +2016-04-08 12:26:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 12:26:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 12:26:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 12:26:55 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 12:26:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 12:26:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 12:26:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 12:26:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 12:26:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 12:26:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 12:26:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 12:26:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 12:26:55 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 12:26:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 12:26:56 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 12:26:56 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:26:56 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 12:26:56 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@92f766b +2016-04-08 12:26:56 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@23422f65 +2016-04-08 12:26:56 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@144e8b6d +2016-04-08 12:26:56 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@7bb1aa3b +2016-04-08 12:26:56 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 88 ms +2016-04-08 12:26:56 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 12:26:56 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 12:26:56 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 12:26:56 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 12:26:56 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 12:26:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:26:56 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:26:56 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 12:26:56 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-08 12:26:56 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 12:26:56 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:26:56 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 12:26:56 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 12:26:57 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:26:57 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 12:27:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:27:00 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:27:00 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:27:00 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 12:27:00 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:27:00 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 12:27:00 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 12:27:00 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 12:27:00 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 12:27:00 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 12:27:00 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 12:27:00 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 12:27:00 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 12:27:00 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 12:27:00 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 12:27:00 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 12:27:00 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:27:00 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 12:27:00 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 12:27:00 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 12:27:00 DEBUG WPS2SM:201 - Schema: null +2016-04-08 12:27:00 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 12:27:00 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 12:27:00 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 12:27:00 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:27:00 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 12:27:00 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 12:27:00 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:27:00 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:27:00 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 12:27:00 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 12:27:00 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 12:27:00 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 12:27:00 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 12:27:00 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 12:27:00 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 12:27:00 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 12:27:00 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 12:27:00 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 12:27:00 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 12:27:00 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:27:00 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 12:27:00 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 12:27:00 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:27:00 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:27:00 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 12:27:00 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 12:27:00 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 12:27:00 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:27:00 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 12:27:00 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 12:27:00 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:27:00 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:27:00 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 12:27:00 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 12:27:00 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 12:27:00 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:27:00 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 12:27:00 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 12:27:00 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:27:00 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:27:00 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 12:27:00 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 12:27:00 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 12:27:00 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:27:00 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 12:27:00 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 12:27:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:27:01 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:27:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:27:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:27:01 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:27:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:27:01 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:27:01 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:27:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:27:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:27:01 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 12:27:01 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 12:27:01 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 12:27:01 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 12:27:01 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 12:27:01 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 12:27:01 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 12:27:01 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 12:27:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 12:27:01 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:27:01 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 12:27:01 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 67 ms +2016-04-08 12:27:01 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is 
null?false +2016-04-08 12:27:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:27:01 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 12:27:01 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:27:01 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:27:01 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 12:27:01 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:27:01 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:27:01 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 12:27:01 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:27:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:27:01 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 12:27:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:27:01 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:27:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:27:01 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:27:01 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:27:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:27:01 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 12:27:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:27:01 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:27:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:27:01 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:27:01 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:27:01 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 12:27:01 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 12:27:01 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 12:27:01 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 12:27:01 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 12:27:02 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 12:27:02 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 12:27:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:27:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:27:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:27:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:27:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:27:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:27:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:27:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:27:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:27:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:27:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:27:02 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 12:27:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by 
giancarlo.panichi +2016-04-08 12:27:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:27:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:27:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:27:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:27:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:27:02 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 12:27:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:27:02 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:27:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:27:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:27:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:27:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:27:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:27:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:27:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:27:02 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 12:27:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 12:27:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 
12:27:02 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:27:02 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 12:27:02 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 30 ms +2016-04-08 12:27:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 12:27:02 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:27:02 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 12:27:02 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 15 ms +2016-04-08 12:27:02 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 12:27:02 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 12:27:02 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 12:27:02 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 12:27:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 12:27:02 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 12:27:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 12:27:02 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:27:02 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 12:27:02 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-08 12:27:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 12:27:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 12:27:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 12:27:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:27:02 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 12:27:02 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 12:27:02 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 12:27:02 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 12:27:02 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 12:27:02 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 12:27:02 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 12:27:02 INFO WorkspaceExplorerServiceImpl:142 - end time - 444 msc 0 sec +2016-04-08 12:27:02 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 12:27:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:27:34 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:27:34 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:27:34 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-08 12:27:34 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:27:34 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 12:27:35 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 12:27:35 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 12:27:35 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 12:27:35 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 12:27:35 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 12:27:35 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-08 12:27:35 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-08 12:27:35 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-08 12:27:35 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-08 12:27:35 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 12:27:35 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 12:27:35 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 12:27:35 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:27:35 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 12:27:35 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 12:27:35 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 12:27:35 DEBUG WPS2SM:201 - Schema: null +2016-04-08 12:27:35 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 12:27:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 12:27:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 12:27:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:27:35 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 12:27:35 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 12:27:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:27:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:27:35 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 12:27:35 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 12:27:35 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 12:27:35 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 12:27:35 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 12:27:35 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 12:27:35 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 12:27:35 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 12:27:35 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 12:27:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 12:27:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 12:27:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:27:35 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 12:27:35 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 12:27:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:27:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:27:35 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 12:27:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 12:27:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 12:27:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:27:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 12:27:35 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-08 12:27:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:27:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:27:35 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-08 12:27:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-08 12:27:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-08 12:27:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:27:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-08 12:27:35 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-08 12:27:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:27:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:27:35 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 12:27:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-08 12:27:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-08 12:27:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:27:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 12:27:35 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-08 12:27:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:27:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:27:35 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-08 12:27:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-08 12:27:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-08 12:27:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:27:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-08 12:27:35 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 12:27:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:27:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:27:35 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 12:27:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-08 12:27:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 12:27:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:27:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 12:27:35 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 12:27:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:27:35 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:27:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:27:35 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:27:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:27:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:27:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:27:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:27:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:27:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:27:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:27:35 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:27:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:27:35 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:27:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:27:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:27:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:27:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:27:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:27:35 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 12:27:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:27:35 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:27:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:27:35 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:27:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:27:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:27:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:27:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:27:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:27:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:27:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:27:35 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 12:27:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:27:35 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:27:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:27:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:27:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:27:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:27:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:27:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:27:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:27:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:27:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 12:27:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:27:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:27:35 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 12:27:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:27:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:27:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:27:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:27:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:27:35 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:27:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:27:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath 
/Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:27:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:27:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:27:35 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 12:27:36 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 12:27:36 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 12:27:36 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 12:27:36 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 12:27:36 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 12:27:36 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 12:27:36 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 12:27:36 INFO WorkspaceExplorerServiceImpl:142 - end time - 214 msc 0 sec +2016-04-08 12:27:36 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 12:27:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:27:37 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:27:37 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:27:37 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF +2016-04-08 12:27:37 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:27:37 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 12:27:38 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF + LOF + Local Outlier Factor (LOF). 
A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed. + + + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + + + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + + + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + + + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + + + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. 
the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + + + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 12:27:38 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 12:27:38 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 12:27:38 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. 
column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + +2016-04-08 12:27:38 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + +2016-04-08 12:27:38 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + +2016-04-08 12:27:38 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + +2016-04-08 12:27:38 DEBUG SClient4WPS:290 - WPSClient->Input: + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + +2016-04-08 12:27:38 DEBUG SClient4WPS:290 - WPSClient->Input: + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + +2016-04-08 12:27:38 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 12:27:38 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 12:27:38 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 12:27:38 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:27:38 DEBUG WPS2SM:279 - Conversion to SM Type->PointsTable is a Complex Input +2016-04-08 12:27:38 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 12:27:38 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 12:27:38 DEBUG WPS2SM:201 - Schema: null +2016-04-08 12:27:38 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 12:27:38 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 12:27:38 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsTable +2016-04-08 12:27:38 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:27:38 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-08 12:27:38 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 12:27:38 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:27:38 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:27:38 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 12:27:38 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-08 12:27:38 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 12:27:38 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from PointsTable separated by | +2016-04-08 12:27:38 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 12:27:38 DEBUG WPS2SM:115 - Machter end: 93 +2016-04-08 12:27:38 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 12:27:38 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: PointsTable +2016-04-08 12:27:38 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 12:27:38 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from PointsTable separated by | ] +2016-04-08 12:27:38 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 12:27:38 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:27:38 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 12:27:38 DEBUG WPS2SM:254 - Conversion to SM Type->PointsClusterLabel is a Literal Input +2016-04-08 12:27:38 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:27:38 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:27:38 DEBUG WPS2SM:101 - Guessed default value: Cluster_ +2016-04-08 12:27:38 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 12:27:38 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsClusterLabel +2016-04-08 12:27:38 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:27:38 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT] +2016-04-08 12:27:38 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_lower_bound is a Literal Input +2016-04-08 12:27:38 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:27:38 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:27:38 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 12:27:38 DEBUG WPS2SM:290 - Conversion to SM Type->Title:locality (usually called k): minimal number of nearest neighbors +2016-04-08 12:27:38 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_lower_bound +2016-04-08 12:27:38 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:27:38 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. of Entries:1; Max N. 
of Entries:1; default:2], typology=OBJECT] +2016-04-08 12:27:38 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_upper_bound is a Literal Input +2016-04-08 12:27:38 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:27:38 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:27:38 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 12:27:38 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of nearest neighbors to take into account for outliers evaluation +2016-04-08 12:27:38 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_upper_bound +2016-04-08 12:27:38 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:27:38 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 12:27:38 DEBUG WPS2SM:254 - Conversion to SM Type->distance_function is a Literal Input +2016-04-08 12:27:38 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:27:38 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:27:38 DEBUG WPS2SM:101 - Guessed default value: euclidian distance +2016-04-08 12:27:38 DEBUG WPS2SM:265 - ValueType[]:[euclidian distance, squared distance, cosine distance, inverted cosine distance, angle] +2016-04-08 12:27:38 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the distance function to use in the calculation +2016-04-08 12:27:38 DEBUG WPS2SM:291 - Conversion to SM Type->Name:distance_function +2016-04-08 12:27:38 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:27:38 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. 
of Entries:1; default:euclidian distance], typology=ENUM] +2016-04-08 12:27:38 DEBUG WPS2SM:254 - Conversion to SM Type->lof_threshold is a Literal Input +2016-04-08 12:27:38 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:27:38 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:27:38 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 12:27:38 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the LOF score threshold over which the point is an outlier (usually 2) +2016-04-08 12:27:38 DEBUG WPS2SM:291 - Conversion to SM Type->Name:lof_threshold +2016-04-08 12:27:38 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:27:38 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 12:27:38 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. 
of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. of Entries:1; default:euclidian distance], typology=ENUM], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 12:27:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:27:38 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:27:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:27:38 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:27:38 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:27:38 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:27:38 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:27:38 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:27:38 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:27:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:27:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:27:38 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 12:27:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:27:38 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:27:38 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:27:38 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:27:38 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:27:38 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:27:38 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:27:38 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 12:27:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:27:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 12:27:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:27:38 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 12:27:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:27:38 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:27:38 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:27:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:27:38 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:27:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:27:38 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:27:38 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:27:38 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:27:38 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:27:38 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:27:38 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:27:38 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:27:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:27:38 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:27:38 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:27:38 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:27:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:27:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:27:38 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:27:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:27:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:27:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:27:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:27:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:27:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:27:38 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:27:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:27:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:27:38 INFO 
JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:27:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:27:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:27:38 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 12:27:38 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 12:27:38 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 12:27:38 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 12:27:38 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 12:27:38 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 12:27:38 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 12:27:38 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 12:27:38 INFO WorkspaceExplorerServiceImpl:142 - end time - 225 msc 0 sec +2016-04-08 12:27:38 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 12:27:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:27:47 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:28:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:28:42 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:28:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:28:42 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 12:28:42 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:28:42 INFO SClient4WPS:630 - Parameters of algorithm 
org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 12:28:42 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:28:42 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 12:28:43 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. 
DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 12:28:43 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 12:28:43 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 12:28:43 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 12:28:43 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. 
table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 12:28:43 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + +2016-04-08 12:28:43 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 12:28:43 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 12:28:43 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 12:28:43 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 12:28:43 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:28:43 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 12:28:43 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 12:28:43 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 12:28:43 DEBUG WPS2SM:201 - Schema: null +2016-04-08 12:28:43 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 12:28:43 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 12:28:43 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 12:28:43 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:28:43 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 12:28:43 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 12:28:43 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:28:43 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:28:43 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 12:28:43 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 12:28:43 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 12:28:43 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 12:28:43 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 12:28:43 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 12:28:43 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 12:28:43 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 12:28:43 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 12:28:43 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 12:28:43 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 12:28:43 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:28:43 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 12:28:43 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 12:28:43 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:28:43 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:28:43 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 12:28:43 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 12:28:43 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 12:28:43 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:28:43 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 12:28:43 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 12:28:43 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:28:43 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:28:43 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 12:28:43 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 12:28:43 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 12:28:43 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:28:43 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 12:28:43 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 12:28:43 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:28:43 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:28:43 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 12:28:43 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 12:28:43 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 12:28:43 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:28:43 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 12:28:43 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 12:28:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:28:43 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:28:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:28:43 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:28:43 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:28:43 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:28:43 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:28:43 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:28:43 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:28:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:28:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:28:43 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 12:28:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:28:43 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:28:43 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:28:43 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:28:43 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:28:43 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:28:43 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:28:43 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 12:28:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:28:43 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:28:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:28:43 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:28:43 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:28:43 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:28:43 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:28:43 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:28:43 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:28:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:28:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:28:43 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:28:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:28:43 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:28:43 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:28:43 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:28:43 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:28:43 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:28:43 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:28:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:28:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 12:28:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:28:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:28:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:28:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:28:43 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:28:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:28:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:28:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:28:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:28:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:28:43 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:28:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:28:43 INFO 
JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:28:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:28:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:28:43 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 12:28:43 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 12:28:43 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 12:28:43 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 12:28:43 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 12:28:43 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 12:28:43 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 12:28:43 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 12:28:43 INFO WorkspaceExplorerServiceImpl:142 - end time - 200 msc 0 sec +2016-04-08 12:28:43 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 12:29:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:29:37 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:30:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:30:32 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:31:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:31:27 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:32:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:32:22 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:32:55 DEBUG AccessLogger:124 - Creating a message 
handling object in order to handle the message queue +2016-04-08 12:32:55 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 12:32:55 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-08 12:32:55 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 12:32:55 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 12:32:55 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 12:32:55 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 12:32:55 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@110c9a22 +2016-04-08 12:32:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 12:32:55 INFO ASLSession:352 - Logging the entrance +2016-04-08 12:32:55 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 12:32:55 DEBUG TemplateModel:83 - 2016-04-08 12:32:55, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 12:32:55 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:32:55 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 12:32:57 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 12:32:57 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 12:32:57 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 12:32:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:32:57 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:32:57 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:32:57 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 12:32:58 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 123 ms +2016-04-08 12:32:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 12:32:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 12:32:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 12:32:58 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 12:32:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 12:32:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 12:32:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 12:32:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 12:32:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 12:32:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 12:32:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 12:32:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 12:32:58 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 12:32:58 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 12:32:58 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 12:32:58 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:32:58 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 12:32:58 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@9c79289 +2016-04-08 12:32:58 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@59037c06 +2016-04-08 12:32:58 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@5c5c3fc1 +2016-04-08 12:32:58 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@7e1d4993 +2016-04-08 12:32:58 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 126 ms +2016-04-08 12:32:58 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 12:32:58 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 12:32:58 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 12:32:58 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 12:32:58 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 12:32:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:32:58 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:32:58 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 12:32:58 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 30 ms +2016-04-08 12:32:58 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 12:32:58 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:32:58 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 12:32:58 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 12:32:59 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:32:59 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 12:33:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:33:02 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:33:02 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:33:02 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 12:33:02 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:33:02 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 12:33:02 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 12:33:02 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 12:33:02 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 12:33:02 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 12:33:02 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 12:33:02 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 12:33:02 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 12:33:02 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 12:33:02 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 12:33:02 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 12:33:02 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:33:02 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 12:33:02 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 12:33:02 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 12:33:02 DEBUG WPS2SM:201 - Schema: null +2016-04-08 12:33:02 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 12:33:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 12:33:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 12:33:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:33:02 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 12:33:02 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 12:33:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:33:02 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:33:02 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 12:33:02 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 12:33:02 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 12:33:02 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 12:33:02 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 12:33:02 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 12:33:02 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 12:33:02 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 12:33:02 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 12:33:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 12:33:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 12:33:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:33:02 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 12:33:02 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 12:33:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:33:02 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:33:02 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 12:33:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 12:33:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 12:33:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:33:02 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 12:33:02 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 12:33:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:33:02 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:33:02 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 12:33:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 12:33:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 12:33:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:33:02 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 12:33:02 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 12:33:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:33:02 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:33:02 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 12:33:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 12:33:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 12:33:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:33:02 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 12:33:02 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 12:33:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:33:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:33:03 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:33:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:33:03 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:33:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:33:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:33:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:33:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:33:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:33:03 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 12:33:03 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 12:33:03 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 12:33:03 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 12:33:03 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 12:33:03 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 12:33:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 12:33:03 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 12:33:03 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 12:33:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:33:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 12:33:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 72 ms +2016-04-08 12:33:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 
12:33:03 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:33:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:33:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:33:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:33:03 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 12:33:03 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 12:33:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:33:03 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:33:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:33:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:33:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:33:03 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 12:33:03 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 12:33:03 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 12:33:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:33:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:33:03 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 12:33:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:33:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:33:03 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 12:33:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:33:03 INFO JCRHomeManager:71 - User 
giancarlo.panichi not found, creating a new one. +2016-04-08 12:33:03 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 12:33:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:33:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:33:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:33:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:33:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:33:03 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 12:33:03 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 12:33:03 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 12:33:03 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 12:33:03 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 12:33:03 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 12:33:03 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 12:33:03 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 12:33:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:33:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:33:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:33:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:33:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:33:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:33:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:33:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:33:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:33:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 
12:33:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:33:03 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 12:33:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:33:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:33:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 12:33:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:33:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:33:03 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:33:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:33:03 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:33:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:33:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:33:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:33:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:33:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:33:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:33:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:33:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 
12:33:04 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 12:33:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 12:33:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:33:04 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:33:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 12:33:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 31 ms +2016-04-08 12:33:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 12:33:04 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:33:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 12:33:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource 
where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 12:33:04 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-08 12:33:04 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 12:33:04 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 12:33:04 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 12:33:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 12:33:04 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 12:33:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 12:33:04 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:33:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 12:33:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 12:33:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 12:33:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 12:33:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 12:33:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:33:04 DEBUG ItemBuilder:108 - Is shared folder: test 
+2016-04-08 12:33:04 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 12:33:04 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 12:33:04 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 12:33:04 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 12:33:04 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 12:33:04 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 12:33:04 INFO WorkspaceExplorerServiceImpl:142 - end time - 462 msc 0 sec +2016-04-08 12:33:04 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 12:33:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:33:50 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:34:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:34:45 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 12:35:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:35:40 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:36:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:36:35 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:37:14 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 12:37:14 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 12:37:14 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-08 12:37:14 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 12:37:14 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 12:37:14 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:37:14 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 12:37:14 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@48cc257d +2016-04-08 12:37:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:37:14 INFO ASLSession:352 - Logging the entrance +2016-04-08 12:37:14 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 12:37:14 DEBUG TemplateModel:83 - 2016-04-08 12:37:14, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 12:37:14 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:37:14 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 12:37:18 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 12:37:18 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 12:37:18 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 12:37:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:37:18 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:37:18 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:37:18 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 12:37:18 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 133 ms +2016-04-08 12:37:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 12:37:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 12:37:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 12:37:18 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 12:37:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 12:37:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 12:37:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 12:37:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 12:37:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 12:37:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 12:37:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 12:37:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 12:37:18 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 12:37:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 12:37:18 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 12:37:18 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:37:18 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 12:37:18 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@4bd6515d +2016-04-08 12:37:18 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@4a3259c2 +2016-04-08 12:37:18 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@e2de9ce +2016-04-08 12:37:18 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@541e4e63 +2016-04-08 12:37:18 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 105 ms +2016-04-08 12:37:18 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 12:37:19 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 12:37:19 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 12:37:19 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 12:37:19 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 12:37:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:37:19 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:37:19 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 12:37:19 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 34 ms +2016-04-08 12:37:19 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 12:37:19 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:37:19 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 12:37:19 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 12:37:20 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:37:20 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 12:37:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:37:23 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:37:23 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:37:23 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 12:37:23 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:37:23 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 12:37:23 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 12:37:23 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 12:37:23 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 12:37:23 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 12:37:23 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 12:37:23 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 12:37:23 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 12:37:23 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 12:37:23 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 12:37:23 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 12:37:23 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:37:23 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 12:37:23 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 12:37:23 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 12:37:23 DEBUG WPS2SM:201 - Schema: null +2016-04-08 12:37:23 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 12:37:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 12:37:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 12:37:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:37:23 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 12:37:23 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 12:37:23 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:37:23 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:37:23 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 12:37:23 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 12:37:23 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 12:37:23 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 12:37:23 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 12:37:23 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 12:37:23 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 12:37:23 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 12:37:23 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 12:37:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 12:37:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 12:37:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:37:23 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 12:37:23 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 12:37:23 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:37:23 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:37:23 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 12:37:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 12:37:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 12:37:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:37:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 12:37:23 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 12:37:23 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:37:23 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:37:23 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 12:37:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 12:37:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 12:37:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:37:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 12:37:23 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 12:37:23 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:37:23 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:37:23 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 12:37:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 12:37:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 12:37:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:37:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 12:37:23 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 12:37:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:37:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:37:24 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:37:24 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:37:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:37:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:37:24 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:37:24 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:37:24 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:37:24 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:37:24 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 12:37:24 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 12:37:24 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 12:37:24 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 12:37:24 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 12:37:24 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 12:37:24 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 12:37:24 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 12:37:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 12:37:24 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:37:24 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 12:37:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:37:24 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:37:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:37:24 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 12:37:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:37:24 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:37:24 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:37:24 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 12:37:24 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 12:37:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:37:24 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:37:24 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:37:24 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 12:37:24 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 12:37:25 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 693 ms +2016-04-08 12:37:25 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 12:37:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:37:25 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:37:25 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:37:25 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. 
+2016-04-08 12:37:25 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 12:37:25 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 12:37:25 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:37:25 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:37:25 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:37:25 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 12:37:25 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:37:25 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:37:25 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 12:37:25 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:37:25 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:37:25 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 12:37:25 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 12:37:25 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 12:37:25 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 12:37:25 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 12:37:25 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 12:37:25 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 12:37:25 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:37:25 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:37:25 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:37:25 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:37:25 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:37:25 DEBUG JCRHomeManager:97 - User is already logged 
+2016-04-08 12:37:25 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:37:25 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:37:25 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:37:25 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:37:25 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 12:37:25 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:37:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:37:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 12:37:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:37:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:37:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:37:25 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:37:25 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:37:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:37:25 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:37:25 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:37:25 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:37:25 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 12:37:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:37:25 INFO JCRServlets:238 - Calling Servlet 
GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:37:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:37:25 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 12:37:25 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 12:37:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 12:37:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:37:26 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:37:26 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 12:37:26 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 32 ms +2016-04-08 12:37:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 12:37:26 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:37:26 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where 
($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 12:37:26 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 12:37:26 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-08 12:37:26 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 12:37:26 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 12:37:26 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 12:37:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 12:37:26 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 12:37:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 12:37:26 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 12:37:26 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 12:37:26 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 15 ms +2016-04-08 12:37:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube in 
thread 34 +2016-04-08 12:37:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 12:37:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 12:37:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:37:26 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 12:37:26 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 12:37:26 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 12:37:26 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 12:37:26 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 12:37:26 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 12:37:26 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 12:37:26 INFO WorkspaceExplorerServiceImpl:142 - end time - 438 msc 0 sec +2016-04-08 12:37:26 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 12:38:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:38:09 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:39:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:39:04 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:39:27 INFO WorkspaceExplorerServiceImpl:600 - getFormattedSize ByItemId 6543184f-3c92-4982-88ed-1287d525eca2 +2016-04-08 12:39:27 INFO WorkspaceExplorerServiceImpl:486 - get Size By ItemId 6543184f-3c92-4982-88ed-1287d525eca2 +2016-04-08 12:39:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:39:27 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:39:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:39:27 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:39:27 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:39:27 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:39:27 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:39:27 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:39:27 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:39:27 INFO WorkspaceExplorerServiceImpl:549 - Get user ACL to FOLDER id: 6543184f-3c92-4982-88ed-1287d525eca2 +2016-04-08 12:39:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:39:27 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 12:39:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:39:27 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:39:27 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:39:27 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:39:27 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:39:27 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:39:27 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:39:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:39:27 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:39:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:39:27 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:39:27 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:39:27 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:39:27 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:39:27 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:39:27 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:39:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:39:28 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:39:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:39:28 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:39:28 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:39:28 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:39:28 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:39:28 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:39:28 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:39:28 INFO JCRServlets:267 - Servlet getItemById 6543184f-3c92-4982-88ed-1287d525eca2 +2016-04-08 12:39:28 INFO JCRServlets:267 - Servlet getItemById 6543184f-3c92-4982-88ed-1287d525eca2 +2016-04-08 12:39:28 INFO JCRServlets:267 - Servlet getItemById 6543184f-3c92-4982-88ed-1287d525eca2 +2016-04-08 12:39:28 INFO JCRServlets:267 - Servlet getItemById 6543184f-3c92-4982-88ed-1287d525eca2 +2016-04-08 12:39:28 INFO JCRServlets:697 - Calling Servlet get Parents By Id 6543184f-3c92-4982-88ed-1287d525eca2 by giancarlo.panichi +2016-04-08 12:39:28 INFO JCRServlets:142 - Calling servlet getChildrenById 6543184f-3c92-4982-88ed-1287d525eca2 by giancarlo.panichi +2016-04-08 12:39:28 INFO JCRServlets:142 - Calling servlet getChildrenById 
6543184f-3c92-4982-88ed-1287d525eca2 by giancarlo.panichi +2016-04-08 12:39:28 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-08 12:39:28 DEBUG ServiceEngine:306 - get() - start +2016-04-08 12:39:28 INFO WorkspaceExplorerServiceImpl:188 - end time - 75 msc 0 sec +2016-04-08 12:39:28 DEBUG ItemBuilder:361 - breadcrumb returning: Item [id=6543184f-3c92-4982-88ed-1287d525eca2, name=StatisticalTest, type=FOLDER, path=null, children=[], owner=null, isFolder=true, isSpecialFolder=false, isSharedFolder=false, isRoot=false, creationDate=null] +2016-04-08 12:39:28 DEBUG ItemBuilder:361 - breadcrumb returning: Item [id=efc48ebb-f682-4636-9a98-1cbee166e336, name=Home, type=FOLDER, path=null, children=[], owner=null, isFolder=true, isSpecialFolder=false, isSharedFolder=false, isRoot=true, creationDate=null] +2016-04-08 12:39:28 DEBUG BucketCoding:32 - Coding name: path: /Home/giancarlo.panichi/Workspace/StatisticalTest/ rootArea /gcube/home/org.gcube.portlets.user/test-home-library/ +2016-04-08 12:39:28 DEBUG BucketCoding:42 - coding name done +2016-04-08 12:39:28 DEBUG ServiceEngine:62 - path(String) - name: StatisticalTest +2016-04-08 12:39:28 DEBUG ServiceEngine:72 - path(String) - path: /gcube/home/org.gcube.portlets.user/test-home-library/Home/giancarlo.panichi/Workspace/ +2016-04-08 12:39:28 DEBUG OperationManager:67 - connection(boolean) - start +2016-04-08 12:39:28 INFO OperationManager:69 - startOpertion getResource..getGcubeAccessType()= null file..getGcubeAccessType() null +2016-04-08 12:39:28 INFO OperationFactory:39 - getOperation(String) - start getFolderSize +2016-04-08 12:39:28 DEBUG OperationFactory:97 - getOperation(String) - end +2016-04-08 12:39:28 DEBUG GetSize:45 - remotePath: /Home/giancarlo.panichi/Workspace/StatisticalTest/ +2016-04-08 12:39:28 DEBUG BucketCoding:32 - Coding name: path: /Home/giancarlo.panichi/Workspace/StatisticalTest/ rootArea /gcube/home/org.gcube.portlets.user/test-home-library/ +2016-04-08 
12:39:28 DEBUG BucketCoding:42 - coding name done +2016-04-08 12:39:28 DEBUG TransportManagerFactory:39 - getOperation(String) - start +2016-04-08 12:39:28 INFO TransportManagerFactory:54 - 0 implementation found. Load default implementation of TransportManager +2016-04-08 12:39:28 DEBUG MongoIO:77 - open mongo connection +2016-04-08 12:39:28 INFO cluster:71 - Cluster created with settings {hosts=[mongo2-d-d4s.d4science.org:27017, mongo3-d-d4s.d4science.org:27017], mode=MULTIPLE, requiredClusterType=UNKNOWN, serverSelectionTimeout='30000 ms', maxWaitQueueSize=150} +2016-04-08 12:39:28 INFO cluster:71 - Adding discovered server mongo2-d-d4s.d4science.org:27017 to client view of cluster +2016-04-08 12:39:28 INFO cluster:71 - Adding discovered server mongo3-d-d4s.d4science.org:27017 to client view of cluster +2016-04-08 12:39:28 DEBUG cluster:56 - Updating cluster description to {type=UNKNOWN, servers=[{address=mongo2-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}, {address=mongo3-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}] +2016-04-08 12:39:28 DEBUG MongoIO:85 - Istantiate MongoDB with options: MongoClientOptions{description='null', readPreference=primaryPreferred, writeConcern=WriteConcern{w=1, wtimeout=0, fsync=false, j=false, codecRegistry=org.bson.codecs.configuration.ProvidersCodecRegistry@21203b74, minConnectionsPerHost=0, maxConnectionsPerHost=30, threadsAllowedToBlockForConnectionMultiplier=5, serverSelectionTimeout=30000, maxWaitTime=120000, maxConnectionIdleTime=0, maxConnectionLifeTime=0, connectTimeout=30000, socketTimeout=0, socketKeepAlive=false, sslEnabled=false, sslInvalidHostNamesAllowed=false, alwaysUseMBeans=false, heartbeatFrequency=10000, minHeartbeatFrequency=500, heartbeatConnectTimeout=20000, heartbeatSocketTimeout=20000, localThreshold=15, requiredReplicaSetName='null', dbDecoderFactory=com.mongodb.DefaultDBDecoder$1@20831427, dbEncoderFactory=com.mongodb.DefaultDBEncoder$1@7da01d1d, 
socketFactory=javax.net.DefaultSocketFactory@1ad79b5a, cursorFinalizerEnabled=true, connectionPoolSettings=ConnectionPoolSettings{maxSize=30, minSize=0, maxWaitQueueSize=150, maxWaitTimeMS=120000, maxConnectionLifeTimeMS=0, maxConnectionIdleTimeMS=0, maintenanceInitialDelayMS=0, maintenanceFrequencyMS=60000}, socketSettings=SocketSettings{connectTimeoutMS=30000, readTimeoutMS=0, keepAlive=false, receiveBufferSize=0, sendBufferSize=0}, serverSettings=ServerSettings{heartbeatFrequencyMS=10000, minHeartbeatFrequencyMS=500}, heartbeatSocketSettings=SocketSettings{connectTimeoutMS=20000, readTimeoutMS=20000, keepAlive=false, receiveBufferSize=0, sendBufferSize=0}} +2016-04-08 12:39:28 INFO MongoIO:106 - new mongo connection pool opened +2016-04-08 12:39:28 INFO cluster:71 - No server chosen by PrimaryServerSelector from cluster description ClusterDescription{type=UNKNOWN, connectionMode=MULTIPLE, all=[ServerDescription{address=mongo2-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}, ServerDescription{address=mongo3-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}]}. 
Waiting for 30000 ms before timing out +2016-04-08 12:39:28 INFO connection:71 - Opened connection [connectionId{localValue:2, serverValue:1094341}] to mongo2-d-d4s.d4science.org:27017 +2016-04-08 12:39:28 INFO connection:71 - Opened connection [connectionId{localValue:1, serverValue:1377293}] to mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:28 DEBUG cluster:56 - Checking status of mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:28 DEBUG cluster:56 - Checking status of mongo2-d-d4s.d4science.org:27017 +2016-04-08 12:39:28 INFO cluster:71 - Monitor thread successfully connected to server with description ServerDescription{address=mongo2-d-d4s.d4science.org:27017, type=REPLICA_SET_OTHER, state=CONNECTED, ok=true, version=ServerVersion{versionList=[3, 0, 8]}, minWireVersion=0, maxWireVersion=3, electionId=null, maxDocumentSize=16777216, roundTripTimeNanos=1370531, setName='storagedev', canonicalAddress=mongo2-d-d4s.d4science.org:27017, hosts=[mongo2-d-d4s.d4science.org:27017, mongo3-d-d4s.d4science.org:27017], passives=[mongo4-d-d4s.d4science.org:27017], arbiters=[mongo1-d-d4s.d4science.org:27017], primary='mongo3-d-d4s.d4science.org:27017', tagSet=TagSet{[]}} +2016-04-08 12:39:28 INFO cluster:71 - Monitor thread successfully connected to server with description ServerDescription{address=mongo3-d-d4s.d4science.org:27017, type=REPLICA_SET_PRIMARY, state=CONNECTED, ok=true, version=ServerVersion{versionList=[3, 0, 8]}, minWireVersion=0, maxWireVersion=3, electionId=56c481dca32e5d3f9711532d, maxDocumentSize=16777216, roundTripTimeNanos=1694305, setName='storagedev', canonicalAddress=mongo3-d-d4s.d4science.org:27017, hosts=[mongo2-d-d4s.d4science.org:27017, mongo3-d-d4s.d4science.org:27017], passives=[mongo4-d-d4s.d4science.org:27017], arbiters=[mongo1-d-d4s.d4science.org:27017], primary='mongo3-d-d4s.d4science.org:27017', tagSet=TagSet{[]}} +2016-04-08 12:39:28 INFO cluster:71 - Discovered cluster type of REPLICA_SET +2016-04-08 12:39:28 INFO cluster:71 - Adding 
discovered server mongo4-d-d4s.d4science.org:27017 to client view of cluster +2016-04-08 12:39:28 INFO cluster:71 - Adding discovered server mongo1-d-d4s.d4science.org:27017 to client view of cluster +2016-04-08 12:39:28 INFO cluster:71 - Discovered replica set primary mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:28 DEBUG cluster:56 - Updating cluster description to {type=REPLICA_SET, servers=[{address=mongo1-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}, {address=mongo2-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}, {address=mongo3-d-d4s.d4science.org:27017, type=REPLICA_SET_PRIMARY, roundTripTime=1.7 ms, state=CONNECTED}, {address=mongo4-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}] +2016-04-08 12:39:28 DEBUG cluster:56 - Updating cluster description to {type=REPLICA_SET, servers=[{address=mongo1-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}, {address=mongo2-d-d4s.d4science.org:27017, type=REPLICA_SET_OTHER, roundTripTime=1.4 ms, state=CONNECTED}, {address=mongo3-d-d4s.d4science.org:27017, type=REPLICA_SET_PRIMARY, roundTripTime=1.7 ms, state=CONNECTED}, {address=mongo4-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}] +2016-04-08 12:39:28 INFO connection:71 - Opened connection [connectionId{localValue:5, serverValue:1377294}] to mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:28 DEBUG command:56 - Sending command {createIndexes : BsonString{value='fs.files'}} to database remotefs on connection [connectionId{localValue:5, serverValue:1377294}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:28 DEBUG command:56 - Command execution completed +2016-04-08 12:39:28 DEBUG command:56 - Sending command {createIndexes : BsonString{value='fs.files'}} to database remotefs on connection [connectionId{localValue:5, serverValue:1377294}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:28 DEBUG command:56 - Command execution completed +2016-04-08 12:39:28 DEBUG command:56 - Sending 
command {createIndexes : BsonString{value='fs.files'}} to database remotefs on connection [connectionId{localValue:5, serverValue:1377294}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:28 DEBUG command:56 - Command execution completed +2016-04-08 12:39:28 DEBUG DefaultMongoClient:1324 - getFolderTotalVolume for folder /gcube/home/org.gcube.portlets.user/test-home-library/Home/giancarlo.panichi/Workspace/StatisticalTest +2016-04-08 12:39:28 DEBUG command:56 - Sending command {count : BsonString{value='fs.files'}} to database remotefs on connection [connectionId{localValue:5, serverValue:1377294}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:28 DEBUG command:56 - Command execution completed +2016-04-08 12:39:28 DEBUG command:56 - Sending command {count : BsonString{value='fs.chunks'}} to database remotefs on connection [connectionId{localValue:5, serverValue:1377294}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:28 DEBUG command:56 - Command execution completed +2016-04-08 12:39:28 DEBUG query:56 - Sending query of namespace remotefs.fs.files on connection [connectionId{localValue:5, serverValue:1377294}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:28 INFO connection:71 - Opened connection [connectionId{localValue:3, serverValue:1125556}] to mongo4-d-d4s.d4science.org:27017 +2016-04-08 12:39:28 DEBUG cluster:56 - Checking status of mongo4-d-d4s.d4science.org:27017 +2016-04-08 12:39:28 INFO cluster:71 - Monitor thread successfully connected to server with description ServerDescription{address=mongo4-d-d4s.d4science.org:27017, type=REPLICA_SET_SECONDARY, state=CONNECTED, ok=true, version=ServerVersion{versionList=[3, 0, 7]}, minWireVersion=0, maxWireVersion=3, electionId=null, maxDocumentSize=16777216, roundTripTimeNanos=1754717, setName='storagedev', canonicalAddress=mongo4-d-d4s.d4science.org:27017, hosts=[mongo2-d-d4s.d4science.org:27017, mongo3-d-d4s.d4science.org:27017], 
passives=[mongo4-d-d4s.d4science.org:27017], arbiters=[mongo1-d-d4s.d4science.org:27017], primary='mongo3-d-d4s.d4science.org:27017', tagSet=TagSet{[]}} +2016-04-08 12:39:28 DEBUG cluster:56 - Updating cluster description to {type=REPLICA_SET, servers=[{address=mongo1-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}, {address=mongo2-d-d4s.d4science.org:27017, type=REPLICA_SET_OTHER, roundTripTime=1.4 ms, state=CONNECTED}, {address=mongo3-d-d4s.d4science.org:27017, type=REPLICA_SET_PRIMARY, roundTripTime=1.7 ms, state=CONNECTED}, {address=mongo4-d-d4s.d4science.org:27017, type=REPLICA_SET_SECONDARY, roundTripTime=1.8 ms, state=CONNECTED}] +2016-04-08 12:39:28 INFO connection:71 - Opened connection [connectionId{localValue:4, serverValue:1714971}] to mongo1-d-d4s.d4science.org:27017 +2016-04-08 12:39:28 DEBUG cluster:56 - Checking status of mongo1-d-d4s.d4science.org:27017 +2016-04-08 12:39:28 INFO cluster:71 - Monitor thread successfully connected to server with description ServerDescription{address=mongo1-d-d4s.d4science.org:27017, type=REPLICA_SET_ARBITER, state=CONNECTED, ok=true, version=ServerVersion{versionList=[3, 0, 8]}, minWireVersion=0, maxWireVersion=3, electionId=null, maxDocumentSize=16777216, roundTripTimeNanos=1247851, setName='storagedev', canonicalAddress=mongo1-d-d4s.d4science.org:27017, hosts=[mongo2-d-d4s.d4science.org:27017, mongo3-d-d4s.d4science.org:27017], passives=[mongo4-d-d4s.d4science.org:27017], arbiters=[mongo1-d-d4s.d4science.org:27017], primary='mongo3-d-d4s.d4science.org:27017', tagSet=TagSet{[]}} +2016-04-08 12:39:28 DEBUG cluster:56 - Updating cluster description to {type=REPLICA_SET, servers=[{address=mongo1-d-d4s.d4science.org:27017, type=REPLICA_SET_ARBITER, roundTripTime=1.2 ms, state=CONNECTED}, {address=mongo2-d-d4s.d4science.org:27017, type=REPLICA_SET_OTHER, roundTripTime=1.4 ms, state=CONNECTED}, {address=mongo3-d-d4s.d4science.org:27017, type=REPLICA_SET_PRIMARY, roundTripTime=1.7 ms, state=CONNECTED}, 
{address=mongo4-d-d4s.d4science.org:27017, type=REPLICA_SET_SECONDARY, roundTripTime=1.8 ms, state=CONNECTED}] +2016-04-08 12:39:29 INFO WorkspaceExplorerServiceImpl:600 - getFormattedSize ByItemId 7ba62008-739a-4787-ae33-7938ad109a54 +2016-04-08 12:39:29 INFO WorkspaceExplorerServiceImpl:486 - get Size By ItemId 7ba62008-739a-4787-ae33-7938ad109a54 +2016-04-08 12:39:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 12:39:29 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 12:39:29 INFO WorkspaceExplorerServiceImpl:549 - Get user ACL to FOLDER id: 7ba62008-739a-4787-ae33-7938ad109a54 +2016-04-08 12:39:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 12:39:29 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:39:29 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:39:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:39:29 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:39:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:39:29 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:39:29 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:39:29 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:39:29 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:39:29 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:39:29 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:39:29 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:39:29 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:39:29 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:39:29 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:39:29 INFO JCRServlets:267 - Servlet getItemById 7ba62008-739a-4787-ae33-7938ad109a54 +2016-04-08 12:39:29 INFO JCRServlets:267 - Servlet getItemById 7ba62008-739a-4787-ae33-7938ad109a54 +2016-04-08 12:39:29 INFO JCRServlets:142 - Calling servlet getChildrenById 7ba62008-739a-4787-ae33-7938ad109a54 by giancarlo.panichi +2016-04-08 12:39:29 DEBUG ServiceEngine:306 - get() - start +2016-04-08 12:39:29 DEBUG BucketCoding:32 - Coding name: path: /Home/giancarlo.panichi/Workspace/StatisticalTest/DBScan/ rootArea /gcube/home/org.gcube.portlets.user/test-home-library/ +2016-04-08 12:39:29 DEBUG BucketCoding:42 - coding name done +2016-04-08 12:39:29 DEBUG ServiceEngine:62 - path(String) - name: DBScan +2016-04-08 12:39:29 DEBUG ServiceEngine:72 - path(String) - path: /gcube/home/org.gcube.portlets.user/test-home-library/Home/giancarlo.panichi/Workspace/StatisticalTest/ +2016-04-08 12:39:29 DEBUG OperationManager:67 - connection(boolean) - start +2016-04-08 12:39:29 INFO OperationManager:69 - startOpertion getResource..getGcubeAccessType()= null file..getGcubeAccessType() null +2016-04-08 12:39:29 INFO OperationFactory:39 - getOperation(String) - start getFolderSize +2016-04-08 12:39:29 DEBUG OperationFactory:97 - getOperation(String) 
- end +2016-04-08 12:39:29 DEBUG GetSize:45 - remotePath: /Home/giancarlo.panichi/Workspace/StatisticalTest/DBScan/ +2016-04-08 12:39:29 DEBUG BucketCoding:32 - Coding name: path: /Home/giancarlo.panichi/Workspace/StatisticalTest/DBScan/ rootArea /gcube/home/org.gcube.portlets.user/test-home-library/ +2016-04-08 12:39:29 DEBUG BucketCoding:42 - coding name done +2016-04-08 12:39:29 DEBUG TransportManagerFactory:39 - getOperation(String) - start +2016-04-08 12:39:29 INFO TransportManagerFactory:54 - 0 implementation found. Load default implementation of TransportManager +2016-04-08 12:39:29 DEBUG MongoIO:77 - open mongo connection +2016-04-08 12:39:29 INFO cluster:71 - Cluster created with settings {hosts=[mongo2-d-d4s.d4science.org:27017, mongo3-d-d4s.d4science.org:27017], mode=MULTIPLE, requiredClusterType=UNKNOWN, serverSelectionTimeout='30000 ms', maxWaitQueueSize=150} +2016-04-08 12:39:29 INFO cluster:71 - Adding discovered server mongo2-d-d4s.d4science.org:27017 to client view of cluster +2016-04-08 12:39:29 INFO cluster:71 - Adding discovered server mongo3-d-d4s.d4science.org:27017 to client view of cluster +2016-04-08 12:39:29 DEBUG cluster:56 - Updating cluster description to {type=UNKNOWN, servers=[{address=mongo2-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}, {address=mongo3-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}] +2016-04-08 12:39:29 DEBUG MongoIO:85 - Istantiate MongoDB with options: MongoClientOptions{description='null', readPreference=primaryPreferred, writeConcern=WriteConcern{w=1, wtimeout=0, fsync=false, j=false, codecRegistry=org.bson.codecs.configuration.ProvidersCodecRegistry@21203b74, minConnectionsPerHost=0, maxConnectionsPerHost=30, threadsAllowedToBlockForConnectionMultiplier=5, serverSelectionTimeout=30000, maxWaitTime=120000, maxConnectionIdleTime=0, maxConnectionLifeTime=0, connectTimeout=30000, socketTimeout=0, socketKeepAlive=false, sslEnabled=false, sslInvalidHostNamesAllowed=false, 
alwaysUseMBeans=false, heartbeatFrequency=10000, minHeartbeatFrequency=500, heartbeatConnectTimeout=20000, heartbeatSocketTimeout=20000, localThreshold=15, requiredReplicaSetName='null', dbDecoderFactory=com.mongodb.DefaultDBDecoder$1@20831427, dbEncoderFactory=com.mongodb.DefaultDBEncoder$1@7da01d1d, socketFactory=javax.net.DefaultSocketFactory@1ad79b5a, cursorFinalizerEnabled=true, connectionPoolSettings=ConnectionPoolSettings{maxSize=30, minSize=0, maxWaitQueueSize=150, maxWaitTimeMS=120000, maxConnectionLifeTimeMS=0, maxConnectionIdleTimeMS=0, maintenanceInitialDelayMS=0, maintenanceFrequencyMS=60000}, socketSettings=SocketSettings{connectTimeoutMS=30000, readTimeoutMS=0, keepAlive=false, receiveBufferSize=0, sendBufferSize=0}, serverSettings=ServerSettings{heartbeatFrequencyMS=10000, minHeartbeatFrequencyMS=500}, heartbeatSocketSettings=SocketSettings{connectTimeoutMS=20000, readTimeoutMS=20000, keepAlive=false, receiveBufferSize=0, sendBufferSize=0}} +2016-04-08 12:39:29 INFO MongoIO:106 - new mongo connection pool opened +2016-04-08 12:39:29 INFO cluster:71 - No server chosen by PrimaryServerSelector from cluster description ClusterDescription{type=UNKNOWN, connectionMode=MULTIPLE, all=[ServerDescription{address=mongo2-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}, ServerDescription{address=mongo3-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}]}. 
Waiting for 30000 ms before timing out +2016-04-08 12:39:29 INFO connection:71 - Opened connection [connectionId{localValue:7, serverValue:1377295}] to mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:29 DEBUG cluster:56 - Checking status of mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:29 INFO cluster:71 - Monitor thread successfully connected to server with description ServerDescription{address=mongo3-d-d4s.d4science.org:27017, type=REPLICA_SET_PRIMARY, state=CONNECTED, ok=true, version=ServerVersion{versionList=[3, 0, 8]}, minWireVersion=0, maxWireVersion=3, electionId=56c481dca32e5d3f9711532d, maxDocumentSize=16777216, roundTripTimeNanos=1447008, setName='storagedev', canonicalAddress=mongo3-d-d4s.d4science.org:27017, hosts=[mongo2-d-d4s.d4science.org:27017, mongo3-d-d4s.d4science.org:27017], passives=[mongo4-d-d4s.d4science.org:27017], arbiters=[mongo1-d-d4s.d4science.org:27017], primary='mongo3-d-d4s.d4science.org:27017', tagSet=TagSet{[]}} +2016-04-08 12:39:29 INFO cluster:71 - Discovered cluster type of REPLICA_SET +2016-04-08 12:39:29 INFO cluster:71 - Adding discovered server mongo4-d-d4s.d4science.org:27017 to client view of cluster +2016-04-08 12:39:29 INFO cluster:71 - Adding discovered server mongo1-d-d4s.d4science.org:27017 to client view of cluster +2016-04-08 12:39:29 INFO cluster:71 - Discovered replica set primary mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:29 DEBUG cluster:56 - Updating cluster description to {type=REPLICA_SET, servers=[{address=mongo1-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}, {address=mongo2-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}, {address=mongo3-d-d4s.d4science.org:27017, type=REPLICA_SET_PRIMARY, roundTripTime=1.4 ms, state=CONNECTED}, {address=mongo4-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}] +2016-04-08 12:39:29 INFO connection:71 - Opened connection [connectionId{localValue:9, serverValue:1714972}] to mongo1-d-d4s.d4science.org:27017 +2016-04-08 12:39:29 
DEBUG cluster:56 - Checking status of mongo1-d-d4s.d4science.org:27017 +2016-04-08 12:39:29 INFO cluster:71 - Monitor thread successfully connected to server with description ServerDescription{address=mongo1-d-d4s.d4science.org:27017, type=REPLICA_SET_ARBITER, state=CONNECTED, ok=true, version=ServerVersion{versionList=[3, 0, 8]}, minWireVersion=0, maxWireVersion=3, electionId=null, maxDocumentSize=16777216, roundTripTimeNanos=989959, setName='storagedev', canonicalAddress=mongo1-d-d4s.d4science.org:27017, hosts=[mongo2-d-d4s.d4science.org:27017, mongo3-d-d4s.d4science.org:27017], passives=[mongo4-d-d4s.d4science.org:27017], arbiters=[mongo1-d-d4s.d4science.org:27017], primary='mongo3-d-d4s.d4science.org:27017', tagSet=TagSet{[]}} +2016-04-08 12:39:29 DEBUG cluster:56 - Updating cluster description to {type=REPLICA_SET, servers=[{address=mongo1-d-d4s.d4science.org:27017, type=REPLICA_SET_ARBITER, roundTripTime=1.0 ms, state=CONNECTED}, {address=mongo2-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}, {address=mongo3-d-d4s.d4science.org:27017, type=REPLICA_SET_PRIMARY, roundTripTime=1.4 ms, state=CONNECTED}, {address=mongo4-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}] +2016-04-08 12:39:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:39:29 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:39:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:39:29 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:39:29 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:39:29 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:39:29 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:39:29 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:39:29 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:39:29 INFO connection:71 - Opened connection [connectionId{localValue:6, serverValue:1094342}] to mongo2-d-d4s.d4science.org:27017 +2016-04-08 12:39:29 DEBUG cluster:56 - Checking status of mongo2-d-d4s.d4science.org:27017 +2016-04-08 12:39:29 INFO cluster:71 - Monitor thread successfully connected to server with description ServerDescription{address=mongo2-d-d4s.d4science.org:27017, type=REPLICA_SET_OTHER, state=CONNECTED, ok=true, version=ServerVersion{versionList=[3, 0, 8]}, minWireVersion=0, maxWireVersion=3, electionId=null, maxDocumentSize=16777216, roundTripTimeNanos=1447754, setName='storagedev', canonicalAddress=mongo2-d-d4s.d4science.org:27017, hosts=[mongo2-d-d4s.d4science.org:27017, mongo3-d-d4s.d4science.org:27017], passives=[mongo4-d-d4s.d4science.org:27017], arbiters=[mongo1-d-d4s.d4science.org:27017], primary='mongo3-d-d4s.d4science.org:27017', tagSet=TagSet{[]}} +2016-04-08 12:39:29 DEBUG cluster:56 - Updating cluster description to {type=REPLICA_SET, servers=[{address=mongo1-d-d4s.d4science.org:27017, type=REPLICA_SET_ARBITER, roundTripTime=1.0 ms, state=CONNECTED}, {address=mongo2-d-d4s.d4science.org:27017, type=REPLICA_SET_OTHER, roundTripTime=1.4 ms, state=CONNECTED}, {address=mongo3-d-d4s.d4science.org:27017, type=REPLICA_SET_PRIMARY, roundTripTime=1.4 ms, state=CONNECTED}, {address=mongo4-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}] +2016-04-08 12:39:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:39:29 DEBUG ASLSession:458 - Getting 
security token: null in thread 33 +2016-04-08 12:39:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:39:29 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:39:29 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:39:29 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:39:29 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:39:29 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:39:29 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:39:29 INFO JCRServlets:267 - Servlet getItemById 7ba62008-739a-4787-ae33-7938ad109a54 +2016-04-08 12:39:29 INFO connection:71 - Opened connection [connectionId{localValue:8, serverValue:1125557}] to mongo4-d-d4s.d4science.org:27017 +2016-04-08 12:39:29 DEBUG cluster:56 - Checking status of mongo4-d-d4s.d4science.org:27017 +2016-04-08 12:39:29 INFO cluster:71 - Monitor thread successfully connected to server with description ServerDescription{address=mongo4-d-d4s.d4science.org:27017, type=REPLICA_SET_SECONDARY, state=CONNECTED, ok=true, version=ServerVersion{versionList=[3, 0, 7]}, minWireVersion=0, maxWireVersion=3, electionId=null, maxDocumentSize=16777216, roundTripTimeNanos=1341187, setName='storagedev', canonicalAddress=mongo4-d-d4s.d4science.org:27017, hosts=[mongo2-d-d4s.d4science.org:27017, mongo3-d-d4s.d4science.org:27017], passives=[mongo4-d-d4s.d4science.org:27017], arbiters=[mongo1-d-d4s.d4science.org:27017], primary='mongo3-d-d4s.d4science.org:27017', tagSet=TagSet{[]}} +2016-04-08 12:39:29 DEBUG cluster:56 - Updating cluster description to {type=REPLICA_SET, servers=[{address=mongo1-d-d4s.d4science.org:27017, type=REPLICA_SET_ARBITER, roundTripTime=1.0 ms, state=CONNECTED}, {address=mongo2-d-d4s.d4science.org:27017, type=REPLICA_SET_OTHER, roundTripTime=1.4 ms, state=CONNECTED}, 
{address=mongo3-d-d4s.d4science.org:27017, type=REPLICA_SET_PRIMARY, roundTripTime=1.4 ms, state=CONNECTED}, {address=mongo4-d-d4s.d4science.org:27017, type=REPLICA_SET_SECONDARY, roundTripTime=1.3 ms, state=CONNECTED}] +2016-04-08 12:39:29 INFO JCRServlets:267 - Servlet getItemById 7ba62008-739a-4787-ae33-7938ad109a54 +2016-04-08 12:39:29 INFO connection:71 - Opened connection [connectionId{localValue:10, serverValue:1377296}] to mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:29 DEBUG command:56 - Sending command {createIndexes : BsonString{value='fs.files'}} to database remotefs on connection [connectionId{localValue:10, serverValue:1377296}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:29 DEBUG command:56 - Command execution completed +2016-04-08 12:39:29 DEBUG command:56 - Sending command {createIndexes : BsonString{value='fs.files'}} to database remotefs on connection [connectionId{localValue:10, serverValue:1377296}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:29 DEBUG command:56 - Command execution completed +2016-04-08 12:39:29 DEBUG command:56 - Sending command {createIndexes : BsonString{value='fs.files'}} to database remotefs on connection [connectionId{localValue:10, serverValue:1377296}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:29 DEBUG command:56 - Command execution completed +2016-04-08 12:39:29 DEBUG DefaultMongoClient:1324 - getFolderTotalVolume for folder /gcube/home/org.gcube.portlets.user/test-home-library/Home/giancarlo.panichi/Workspace/StatisticalTest/DBScan +2016-04-08 12:39:29 DEBUG command:56 - Sending command {count : BsonString{value='fs.files'}} to database remotefs on connection [connectionId{localValue:10, serverValue:1377296}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:29 INFO JCRServlets:697 - Calling Servlet get Parents By Id 7ba62008-739a-4787-ae33-7938ad109a54 by giancarlo.panichi +2016-04-08 12:39:29 DEBUG command:56 - Command execution completed 
+2016-04-08 12:39:29 DEBUG command:56 - Sending command {count : BsonString{value='fs.chunks'}} to database remotefs on connection [connectionId{localValue:10, serverValue:1377296}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:29 INFO JCRServlets:142 - Calling servlet getChildrenById 7ba62008-739a-4787-ae33-7938ad109a54 by giancarlo.panichi +2016-04-08 12:39:29 DEBUG command:56 - Command execution completed +2016-04-08 12:39:29 DEBUG query:56 - Sending query of namespace remotefs.fs.files on connection [connectionId{localValue:10, serverValue:1377296}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:29 INFO WorkspaceExplorerServiceImpl:188 - end time - 61 msc 0 sec +2016-04-08 12:39:29 INFO JCRServlets:267 - Servlet getItemById 6543184f-3c92-4982-88ed-1287d525eca2 +2016-04-08 12:39:29 DEBUG ItemBuilder:361 - breadcrumb returning: Item [id=7ba62008-739a-4787-ae33-7938ad109a54, name=DBScan, type=FOLDER, path=null, children=[], owner=null, isFolder=true, isSpecialFolder=false, isSharedFolder=false, isRoot=false, creationDate=null] +2016-04-08 12:39:29 INFO JCRServlets:267 - Servlet getItemById efc48ebb-f682-4636-9a98-1cbee166e336 +2016-04-08 12:39:29 DEBUG ItemBuilder:361 - breadcrumb returning: Item [id=6543184f-3c92-4982-88ed-1287d525eca2, name=StatisticalTest, type=FOLDER, path=null, children=[], owner=null, isFolder=true, isSpecialFolder=false, isSharedFolder=false, isRoot=false, creationDate=null] +2016-04-08 12:39:29 DEBUG ItemBuilder:361 - breadcrumb returning: Item [id=efc48ebb-f682-4636-9a98-1cbee166e336, name=Home, type=FOLDER, path=null, children=[], owner=null, isFolder=true, isSpecialFolder=false, isSharedFolder=false, isRoot=true, creationDate=null] +2016-04-08 12:39:30 DEBUG query:56 - Query completed +2016-04-08 12:39:30 INFO DefaultMongoClient:1372 - retrieveRemoteFileObject found 5 objects +2016-04-08 12:39:30 DEBUG connection:56 - Closing connection connectionId{localValue:4, serverValue:1714971} +2016-04-08 12:39:30 
DEBUG connection:56 - Closing connection connectionId{localValue:2, serverValue:1094341} +2016-04-08 12:39:30 DEBUG connection:56 - Closing connection connectionId{localValue:3, serverValue:1125556} +2016-04-08 12:39:30 INFO connection:71 - Closed connection [connectionId{localValue:5, serverValue:1377294}] to mongo3-d-d4s.d4science.org:27017 because the pool has been closed. +2016-04-08 12:39:30 DEBUG connection:56 - Closing connection connectionId{localValue:5, serverValue:1377294} +2016-04-08 12:39:30 DEBUG connection:56 - Closing connection connectionId{localValue:1, serverValue:1377293} +2016-04-08 12:39:30 INFO MongoIO:508 - Mongo has been closed +2016-04-08 12:39:30 INFO DefaultMongoClient:1329 - getFolderTotalVolume 129978 for folder /gcube/home/org.gcube.portlets.user/test-home-library/Home/giancarlo.panichi/Workspace/StatisticalTest +2016-04-08 12:39:30 DEBUG GetSize:35 - PATH /gcube/home/org.gcube.portlets.user/test-home-library/Home/giancarlo.panichi/Workspace/StatisticalTest +2016-04-08 12:39:30 INFO WorkspaceExplorerServiceImpl:503 - returning size: 129978 +2016-04-08 12:39:30 DEBUG query:56 - Query completed +2016-04-08 12:39:30 INFO DefaultMongoClient:1372 - retrieveRemoteFileObject found 1 objects +2016-04-08 12:39:30 DEBUG connection:56 - Closing connection connectionId{localValue:9, serverValue:1714972} +2016-04-08 12:39:30 DEBUG connection:56 - Closing connection connectionId{localValue:6, serverValue:1094342} +2016-04-08 12:39:30 INFO connection:71 - Closed connection [connectionId{localValue:10, serverValue:1377296}] to mongo3-d-d4s.d4science.org:27017 because the pool has been closed. 
+2016-04-08 12:39:30 DEBUG connection:56 - Closing connection connectionId{localValue:8, serverValue:1125557} +2016-04-08 12:39:30 DEBUG connection:56 - Closing connection connectionId{localValue:10, serverValue:1377296} +2016-04-08 12:39:30 INFO MongoIO:508 - Mongo has been closed +2016-04-08 12:39:30 DEBUG connection:56 - Closing connection connectionId{localValue:7, serverValue:1377295} +2016-04-08 12:39:30 INFO DefaultMongoClient:1329 - getFolderTotalVolume 849 for folder /gcube/home/org.gcube.portlets.user/test-home-library/Home/giancarlo.panichi/Workspace/StatisticalTest/DBScan +2016-04-08 12:39:30 DEBUG GetSize:35 - PATH /gcube/home/org.gcube.portlets.user/test-home-library/Home/giancarlo.panichi/Workspace/StatisticalTest/DBScan +2016-04-08 12:39:30 INFO WorkspaceExplorerServiceImpl:503 - returning size: 849 +2016-04-08 12:39:33 INFO WorkspaceExplorerServiceImpl:600 - getFormattedSize ByItemId 13d0382e-8833-4df7-b4dc-61bbba61b3e5 +2016-04-08 12:39:33 INFO WorkspaceExplorerServiceImpl:486 - get Size By ItemId 13d0382e-8833-4df7-b4dc-61bbba61b3e5 +2016-04-08 12:39:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:39:33 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:39:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:39:33 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:39:33 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:39:33 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:39:33 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:39:33 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:39:33 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:39:33 INFO WorkspaceExplorerServiceImpl:518 - get MimeType By ItemId 13d0382e-8833-4df7-b4dc-61bbba61b3e5 +2016-04-08 12:39:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:39:33 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:39:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:39:33 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:39:33 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:39:33 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:39:33 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:39:33 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:39:33 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:39:33 INFO WorkspaceExplorerServiceImpl:549 - Get user ACL to FOLDER id: 13d0382e-8833-4df7-b4dc-61bbba61b3e5 +2016-04-08 12:39:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:39:33 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 12:39:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:39:33 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:39:33 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:39:33 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:39:33 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:39:33 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:39:33 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:39:33 INFO JCRServlets:267 - Servlet getItemById 13d0382e-8833-4df7-b4dc-61bbba61b3e5 +2016-04-08 12:39:33 INFO JCRServlets:267 - Servlet getItemById 13d0382e-8833-4df7-b4dc-61bbba61b3e5 +2016-04-08 12:39:33 INFO JCRServlets:267 - Servlet getItemById 13d0382e-8833-4df7-b4dc-61bbba61b3e5 +2016-04-08 12:39:33 INFO WorkspaceExplorerServiceImpl:503 - returning size: 849 +2016-04-08 12:39:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:39:34 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:39:34 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:39:34 DEBUG DataMinerManagerServiceImpl:546 - retrieveTableInformation(): Item [id=13d0382e-8833-4df7-b4dc-61bbba61b3e5, name=hcaf_filtered.csv, type=EXTERNAL_FILE, path=/Workspace/StatisticalTest/DBScan/hcaf_filtered.csv, children=[], owner=giancarlo.panichi, isFolder=false, isSpecialFolder=false, isSharedFolder=false, isRoot=false, creationDate=Wed Mar 09 17:10:22 CET 2016] +2016-04-08 12:39:34 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:39:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:39:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:39:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:39:34 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:39:34 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:39:34 INFO JCRServlets:267 - Servlet getItemById 13d0382e-8833-4df7-b4dc-61bbba61b3e5 +2016-04-08 12:39:34 DEBUG ServiceEngine:193 - get() - start +2016-04-08 12:39:34 DEBUG BucketCoding:32 - Coding name: path: /Home/giancarlo.panichi/Workspace/StatisticalTest/DBScan/hcaf_filtered.csv rootArea /gcube/home/org.gcube.portlets.user/test-home-library/ +2016-04-08 12:39:34 DEBUG BucketCoding:42 - coding name done +2016-04-08 12:39:34 DEBUG ServiceEngine:62 - path(String) - name: hcaf_filtered.csv +2016-04-08 12:39:34 DEBUG ServiceEngine:72 - path(String) - path: /gcube/home/org.gcube.portlets.user/test-home-library/Home/giancarlo.panichi/Workspace/StatisticalTest/DBScan/ +2016-04-08 12:39:34 DEBUG OperationManager:67 - connection(boolean) - start +2016-04-08 12:39:34 INFO OperationManager:69 - startOpertion getResource..getGcubeAccessType()= null file..getGcubeAccessType() null +2016-04-08 12:39:34 INFO OperationFactory:39 - getOperation(String) - start download +2016-04-08 12:39:34 DEBUG OperationFactory:97 - getOperation(String) - end +2016-04-08 12:39:34 DEBUG BucketCoding:32 - Coding name: path: /Home/giancarlo.panichi/Workspace/StatisticalTest/DBScan/hcaf_filtered.csv rootArea /gcube/home/org.gcube.portlets.user/test-home-library/ +2016-04-08 12:39:34 DEBUG BucketCoding:42 - coding name done +2016-04-08 12:39:34 DEBUG Download:57 - DOWNLOAD /Home/giancarlo.panichi/Workspace/StatisticalTest/DBScan/hcaf_filtered.csv in bucket: 
/gcube/home/org.gcube.portlets.user/test-home-library/Home/giancarlo.panichi/Workspace/StatisticalTest/DBScan/hcaf_filtered.csv +2016-04-08 12:39:34 DEBUG Operation:173 - get(String) - start +2016-04-08 12:39:34 DEBUG TransportManagerFactory:39 - getOperation(String) - start +2016-04-08 12:39:34 INFO TransportManagerFactory:54 - 0 implementation found. Load default implementation of TransportManager +2016-04-08 12:39:34 DEBUG MongoIO:77 - open mongo connection +2016-04-08 12:39:34 INFO cluster:71 - Cluster created with settings {hosts=[mongo2-d-d4s.d4science.org:27017, mongo3-d-d4s.d4science.org:27017], mode=MULTIPLE, requiredClusterType=UNKNOWN, serverSelectionTimeout='30000 ms', maxWaitQueueSize=150} +2016-04-08 12:39:34 INFO cluster:71 - Adding discovered server mongo2-d-d4s.d4science.org:27017 to client view of cluster +2016-04-08 12:39:34 INFO cluster:71 - Adding discovered server mongo3-d-d4s.d4science.org:27017 to client view of cluster +2016-04-08 12:39:34 DEBUG cluster:56 - Updating cluster description to {type=UNKNOWN, servers=[{address=mongo2-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}, {address=mongo3-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}] +2016-04-08 12:39:34 DEBUG MongoIO:85 - Istantiate MongoDB with options: MongoClientOptions{description='null', readPreference=primaryPreferred, writeConcern=WriteConcern{w=1, wtimeout=0, fsync=false, j=false, codecRegistry=org.bson.codecs.configuration.ProvidersCodecRegistry@21203b74, minConnectionsPerHost=0, maxConnectionsPerHost=30, threadsAllowedToBlockForConnectionMultiplier=5, serverSelectionTimeout=30000, maxWaitTime=120000, maxConnectionIdleTime=0, maxConnectionLifeTime=0, connectTimeout=30000, socketTimeout=0, socketKeepAlive=false, sslEnabled=false, sslInvalidHostNamesAllowed=false, alwaysUseMBeans=false, heartbeatFrequency=10000, minHeartbeatFrequency=500, heartbeatConnectTimeout=20000, heartbeatSocketTimeout=20000, localThreshold=15, requiredReplicaSetName='null', 
dbDecoderFactory=com.mongodb.DefaultDBDecoder$1@20831427, dbEncoderFactory=com.mongodb.DefaultDBEncoder$1@7da01d1d, socketFactory=javax.net.DefaultSocketFactory@1ad79b5a, cursorFinalizerEnabled=true, connectionPoolSettings=ConnectionPoolSettings{maxSize=30, minSize=0, maxWaitQueueSize=150, maxWaitTimeMS=120000, maxConnectionLifeTimeMS=0, maxConnectionIdleTimeMS=0, maintenanceInitialDelayMS=0, maintenanceFrequencyMS=60000}, socketSettings=SocketSettings{connectTimeoutMS=30000, readTimeoutMS=0, keepAlive=false, receiveBufferSize=0, sendBufferSize=0}, serverSettings=ServerSettings{heartbeatFrequencyMS=10000, minHeartbeatFrequencyMS=500}, heartbeatSocketSettings=SocketSettings{connectTimeoutMS=20000, readTimeoutMS=20000, keepAlive=false, receiveBufferSize=0, sendBufferSize=0}} +2016-04-08 12:39:34 INFO MongoIO:106 - new mongo connection pool opened +2016-04-08 12:39:34 INFO cluster:71 - No server chosen by PrimaryServerSelector from cluster description ClusterDescription{type=UNKNOWN, connectionMode=MULTIPLE, all=[ServerDescription{address=mongo2-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}, ServerDescription{address=mongo3-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}]}. 
Waiting for 30000 ms before timing out +2016-04-08 12:39:34 INFO connection:71 - Opened connection [connectionId{localValue:11, serverValue:1094345}] to mongo2-d-d4s.d4science.org:27017 +2016-04-08 12:39:34 DEBUG cluster:56 - Checking status of mongo2-d-d4s.d4science.org:27017 +2016-04-08 12:39:34 INFO cluster:71 - Monitor thread successfully connected to server with description ServerDescription{address=mongo2-d-d4s.d4science.org:27017, type=REPLICA_SET_OTHER, state=CONNECTED, ok=true, version=ServerVersion{versionList=[3, 0, 8]}, minWireVersion=0, maxWireVersion=3, electionId=null, maxDocumentSize=16777216, roundTripTimeNanos=1390691, setName='storagedev', canonicalAddress=mongo2-d-d4s.d4science.org:27017, hosts=[mongo2-d-d4s.d4science.org:27017, mongo3-d-d4s.d4science.org:27017], passives=[mongo4-d-d4s.d4science.org:27017], arbiters=[mongo1-d-d4s.d4science.org:27017], primary='mongo3-d-d4s.d4science.org:27017', tagSet=TagSet{[]}} +2016-04-08 12:39:34 INFO cluster:71 - Discovered cluster type of REPLICA_SET +2016-04-08 12:39:34 INFO connection:71 - Opened connection [connectionId{localValue:12, serverValue:1377297}] to mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:34 INFO cluster:71 - Adding discovered server mongo4-d-d4s.d4science.org:27017 to client view of cluster +2016-04-08 12:39:34 DEBUG cluster:56 - Checking status of mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:34 INFO cluster:71 - Adding discovered server mongo1-d-d4s.d4science.org:27017 to client view of cluster +2016-04-08 12:39:34 DEBUG cluster:56 - Updating cluster description to {type=REPLICA_SET, servers=[{address=mongo1-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}, {address=mongo2-d-d4s.d4science.org:27017, type=REPLICA_SET_OTHER, roundTripTime=1.4 ms, state=CONNECTED}, {address=mongo3-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}, {address=mongo4-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}] +2016-04-08 12:39:34 INFO cluster:71 - Monitor thread 
successfully connected to server with description ServerDescription{address=mongo3-d-d4s.d4science.org:27017, type=REPLICA_SET_PRIMARY, state=CONNECTED, ok=true, version=ServerVersion{versionList=[3, 0, 8]}, minWireVersion=0, maxWireVersion=3, electionId=56c481dca32e5d3f9711532d, maxDocumentSize=16777216, roundTripTimeNanos=1559481, setName='storagedev', canonicalAddress=mongo3-d-d4s.d4science.org:27017, hosts=[mongo2-d-d4s.d4science.org:27017, mongo3-d-d4s.d4science.org:27017], passives=[mongo4-d-d4s.d4science.org:27017], arbiters=[mongo1-d-d4s.d4science.org:27017], primary='mongo3-d-d4s.d4science.org:27017', tagSet=TagSet{[]}} +2016-04-08 12:39:34 INFO cluster:71 - Discovered replica set primary mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:34 DEBUG cluster:56 - Updating cluster description to {type=REPLICA_SET, servers=[{address=mongo1-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}, {address=mongo2-d-d4s.d4science.org:27017, type=REPLICA_SET_OTHER, roundTripTime=1.4 ms, state=CONNECTED}, {address=mongo3-d-d4s.d4science.org:27017, type=REPLICA_SET_PRIMARY, roundTripTime=1.6 ms, state=CONNECTED}, {address=mongo4-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}] +2016-04-08 12:39:34 INFO connection:71 - Opened connection [connectionId{localValue:13, serverValue:1714976}] to mongo1-d-d4s.d4science.org:27017 +2016-04-08 12:39:34 DEBUG cluster:56 - Checking status of mongo1-d-d4s.d4science.org:27017 +2016-04-08 12:39:34 INFO cluster:71 - Monitor thread successfully connected to server with description ServerDescription{address=mongo1-d-d4s.d4science.org:27017, type=REPLICA_SET_ARBITER, state=CONNECTED, ok=true, version=ServerVersion{versionList=[3, 0, 8]}, minWireVersion=0, maxWireVersion=3, electionId=null, maxDocumentSize=16777216, roundTripTimeNanos=954183, setName='storagedev', canonicalAddress=mongo1-d-d4s.d4science.org:27017, hosts=[mongo2-d-d4s.d4science.org:27017, mongo3-d-d4s.d4science.org:27017], 
passives=[mongo4-d-d4s.d4science.org:27017], arbiters=[mongo1-d-d4s.d4science.org:27017], primary='mongo3-d-d4s.d4science.org:27017', tagSet=TagSet{[]}} +2016-04-08 12:39:34 DEBUG cluster:56 - Updating cluster description to {type=REPLICA_SET, servers=[{address=mongo1-d-d4s.d4science.org:27017, type=REPLICA_SET_ARBITER, roundTripTime=1.0 ms, state=CONNECTED}, {address=mongo2-d-d4s.d4science.org:27017, type=REPLICA_SET_OTHER, roundTripTime=1.4 ms, state=CONNECTED}, {address=mongo3-d-d4s.d4science.org:27017, type=REPLICA_SET_PRIMARY, roundTripTime=1.6 ms, state=CONNECTED}, {address=mongo4-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}] +2016-04-08 12:39:34 INFO connection:71 - Opened connection [connectionId{localValue:14, serverValue:1125561}] to mongo4-d-d4s.d4science.org:27017 +2016-04-08 12:39:34 DEBUG cluster:56 - Checking status of mongo4-d-d4s.d4science.org:27017 +2016-04-08 12:39:34 INFO cluster:71 - Monitor thread successfully connected to server with description ServerDescription{address=mongo4-d-d4s.d4science.org:27017, type=REPLICA_SET_SECONDARY, state=CONNECTED, ok=true, version=ServerVersion{versionList=[3, 0, 7]}, minWireVersion=0, maxWireVersion=3, electionId=null, maxDocumentSize=16777216, roundTripTimeNanos=1446308, setName='storagedev', canonicalAddress=mongo4-d-d4s.d4science.org:27017, hosts=[mongo2-d-d4s.d4science.org:27017, mongo3-d-d4s.d4science.org:27017], passives=[mongo4-d-d4s.d4science.org:27017], arbiters=[mongo1-d-d4s.d4science.org:27017], primary='mongo3-d-d4s.d4science.org:27017', tagSet=TagSet{[]}} +2016-04-08 12:39:34 DEBUG cluster:56 - Updating cluster description to {type=REPLICA_SET, servers=[{address=mongo1-d-d4s.d4science.org:27017, type=REPLICA_SET_ARBITER, roundTripTime=1.0 ms, state=CONNECTED}, {address=mongo2-d-d4s.d4science.org:27017, type=REPLICA_SET_OTHER, roundTripTime=1.4 ms, state=CONNECTED}, {address=mongo3-d-d4s.d4science.org:27017, type=REPLICA_SET_PRIMARY, roundTripTime=1.6 ms, state=CONNECTED}, 
{address=mongo4-d-d4s.d4science.org:27017, type=REPLICA_SET_SECONDARY, roundTripTime=1.4 ms, state=CONNECTED}] +2016-04-08 12:39:35 INFO connection:71 - Opened connection [connectionId{localValue:15, serverValue:1377298}] to mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG command:56 - Sending command {createIndexes : BsonString{value='fs.files'}} to database remotefs on connection [connectionId{localValue:15, serverValue:1377298}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG command:56 - Command execution completed +2016-04-08 12:39:35 DEBUG command:56 - Sending command {createIndexes : BsonString{value='fs.files'}} to database remotefs on connection [connectionId{localValue:15, serverValue:1377298}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG command:56 - Command execution completed +2016-04-08 12:39:35 DEBUG command:56 - Sending command {createIndexes : BsonString{value='fs.files'}} to database remotefs on connection [connectionId{localValue:15, serverValue:1377298}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG command:56 - Command execution completed +2016-04-08 12:39:35 INFO DefaultMongoClient:96 - MongoClient get method: OperationDefinition [operation=DOWNLOAD, localResource=VOID, remoteResource=PATH_FOR_INPUT_STREAM] +2016-04-08 12:39:35 DEBUG command:56 - Sending command {count : BsonString{value='fs.files'}} to database remotefs on connection [connectionId{localValue:15, serverValue:1377298}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG command:56 - Command execution completed +2016-04-08 12:39:35 DEBUG command:56 - Sending command {count : BsonString{value='fs.chunks'}} to database remotefs on connection [connectionId{localValue:15, serverValue:1377298}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG command:56 - Command execution completed +2016-04-08 12:39:35 INFO MongoIO:137 - MongoDB - retrieve object from pathServer: 
/gcube/home/org.gcube.portlets.user/test-home-library/Home/giancarlo.panichi/Workspace/StatisticalTest/DBScan/hcaf_filtered.csv +2016-04-08 12:39:35 DEBUG command:56 - Sending command {count : BsonString{value='fs.files'}} to database remotefs on connection [connectionId{localValue:15, serverValue:1377298}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG command:56 - Command execution completed +2016-04-08 12:39:35 DEBUG command:56 - Sending command {count : BsonString{value='fs.chunks'}} to database remotefs on connection [connectionId{localValue:15, serverValue:1377298}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG command:56 - Command execution completed +2016-04-08 12:39:35 INFO MongoIO:168 - remote object is not a validID : /gcube/home/org.gcube.portlets.user/test-home-library/Home/giancarlo.panichi/Workspace/StatisticalTest/DBScan/hcaf_filtered.csv +2016-04-08 12:39:35 DEBUG query:56 - Sending query of namespace remotefs.fs.files on connection [connectionId{localValue:15, serverValue:1377298}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG query:56 - Query completed +2016-04-08 12:39:35 INFO MongoIO:186 - object found hcaf_filtered.csv +2016-04-08 12:39:35 INFO DefaultMongoClient:1047 - MongoClient download method: OperationDefinition [operation=DOWNLOAD, localResource=VOID, remoteResource=PATH_FOR_INPUT_STREAM] +2016-04-08 12:39:35 INFO DefaultMongoClient:1491 - set last operation: DOWNLOAD +2016-04-08 12:39:35 DEBUG update:56 - Updating documents in namespace remotefs.fs.files on connection [connectionId{localValue:15, serverValue:1377298}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG update:56 - Update completed +2016-04-08 12:39:35 DEBUG query:56 - Sending query of namespace remotefs.fs.chunks on connection [connectionId{localValue:15, serverValue:1377298}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG query:56 - Query completed 
+2016-04-08 12:39:35 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 12:39:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 12:39:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 12:39:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 12:39:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 12:39:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 12:39:35 INFO JCRServlets:267 - Servlet getItemById 13d0382e-8833-4df7-b4dc-61bbba61b3e5 +2016-04-08 12:39:35 INFO JCRWorkspaceItem:998 - get PublicLink for item: hcaf_filtered.csv +2016-04-08 12:39:35 INFO ServiceEngine:34 - file gCube parameter costructor: shared /gcube +2016-04-08 12:39:35 INFO ServiceEngine:44 - file gCube parameter before: shared /gcube +2016-04-08 12:39:35 DEBUG BucketCoding:32 - Coding name: path: /Home/giancarlo.panichi/Workspace/StatisticalTest/DBScan/hcaf_filtered.csv rootArea /gcube/home/org.gcube.portlets.user/test-home-library/ +2016-04-08 12:39:35 DEBUG BucketCoding:42 - coding name done +2016-04-08 12:39:35 DEBUG ServiceEngine:62 - path(String) - name: hcaf_filtered.csv +2016-04-08 12:39:35 DEBUG ServiceEngine:72 - path(String) - path: /gcube/home/org.gcube.portlets.user/test-home-library/Home/giancarlo.panichi/Workspace/StatisticalTest/DBScan/ +2016-04-08 12:39:35 INFO ServiceEngine:52 - file gCube parameter after: shared /gcube +2016-04-08 12:39:35 DEBUG OperationManager:67 - connection(boolean) - start +2016-04-08 12:39:35 INFO OperationManager:69 - startOpertion getResource..getGcubeAccessType()= shared file..getGcubeAccessType() shared +2016-04-08 12:39:35 INFO OperationFactory:39 - getOperation(String) - start getHttpUrl +2016-04-08 12:39:35 DEBUG OperationFactory:97 - getOperation(String) - end +2016-04-08 12:39:35 DEBUG BucketCoding:32 - Coding name: path: 
/Home/giancarlo.panichi/Workspace/StatisticalTest/DBScan/hcaf_filtered.csv rootArea /gcube/home/org.gcube.portlets.user/test-home-library/ +2016-04-08 12:39:35 DEBUG BucketCoding:42 - coding name done +2016-04-08 12:39:35 DEBUG TransportManagerFactory:39 - getOperation(String) - start +2016-04-08 12:39:35 INFO TransportManagerFactory:54 - 0 implementation found. Load default implementation of TransportManager +2016-04-08 12:39:35 DEBUG MongoIO:77 - open mongo connection +2016-04-08 12:39:35 INFO cluster:71 - Cluster created with settings {hosts=[mongo2-d-d4s.d4science.org:27017, mongo3-d-d4s.d4science.org:27017], mode=MULTIPLE, requiredClusterType=UNKNOWN, serverSelectionTimeout='30000 ms', maxWaitQueueSize=150} +2016-04-08 12:39:35 INFO cluster:71 - Adding discovered server mongo2-d-d4s.d4science.org:27017 to client view of cluster +2016-04-08 12:39:35 INFO cluster:71 - Adding discovered server mongo3-d-d4s.d4science.org:27017 to client view of cluster +2016-04-08 12:39:35 DEBUG cluster:56 - Updating cluster description to {type=UNKNOWN, servers=[{address=mongo2-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}, {address=mongo3-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}] +2016-04-08 12:39:35 DEBUG MongoIO:85 - Istantiate MongoDB with options: MongoClientOptions{description='null', readPreference=primaryPreferred, writeConcern=WriteConcern{w=1, wtimeout=0, fsync=false, j=false, codecRegistry=org.bson.codecs.configuration.ProvidersCodecRegistry@21203b74, minConnectionsPerHost=0, maxConnectionsPerHost=30, threadsAllowedToBlockForConnectionMultiplier=5, serverSelectionTimeout=30000, maxWaitTime=120000, maxConnectionIdleTime=0, maxConnectionLifeTime=0, connectTimeout=30000, socketTimeout=0, socketKeepAlive=false, sslEnabled=false, sslInvalidHostNamesAllowed=false, alwaysUseMBeans=false, heartbeatFrequency=10000, minHeartbeatFrequency=500, heartbeatConnectTimeout=20000, heartbeatSocketTimeout=20000, localThreshold=15, 
requiredReplicaSetName='null', dbDecoderFactory=com.mongodb.DefaultDBDecoder$1@20831427, dbEncoderFactory=com.mongodb.DefaultDBEncoder$1@7da01d1d, socketFactory=javax.net.DefaultSocketFactory@1ad79b5a, cursorFinalizerEnabled=true, connectionPoolSettings=ConnectionPoolSettings{maxSize=30, minSize=0, maxWaitQueueSize=150, maxWaitTimeMS=120000, maxConnectionLifeTimeMS=0, maxConnectionIdleTimeMS=0, maintenanceInitialDelayMS=0, maintenanceFrequencyMS=60000}, socketSettings=SocketSettings{connectTimeoutMS=30000, readTimeoutMS=0, keepAlive=false, receiveBufferSize=0, sendBufferSize=0}, serverSettings=ServerSettings{heartbeatFrequencyMS=10000, minHeartbeatFrequencyMS=500}, heartbeatSocketSettings=SocketSettings{connectTimeoutMS=20000, readTimeoutMS=20000, keepAlive=false, receiveBufferSize=0, sendBufferSize=0}} +2016-04-08 12:39:35 INFO MongoIO:106 - new mongo connection pool opened +2016-04-08 12:39:35 INFO cluster:71 - No server chosen by PrimaryServerSelector from cluster description ClusterDescription{type=UNKNOWN, connectionMode=MULTIPLE, all=[ServerDescription{address=mongo2-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}, ServerDescription{address=mongo3-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}]}. 
Waiting for 30000 ms before timing out +2016-04-08 12:39:35 INFO connection:71 - Opened connection [connectionId{localValue:16, serverValue:1094346}] to mongo2-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG cluster:56 - Checking status of mongo2-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 INFO cluster:71 - Monitor thread successfully connected to server with description ServerDescription{address=mongo2-d-d4s.d4science.org:27017, type=REPLICA_SET_OTHER, state=CONNECTED, ok=true, version=ServerVersion{versionList=[3, 0, 8]}, minWireVersion=0, maxWireVersion=3, electionId=null, maxDocumentSize=16777216, roundTripTimeNanos=1402270, setName='storagedev', canonicalAddress=mongo2-d-d4s.d4science.org:27017, hosts=[mongo2-d-d4s.d4science.org:27017, mongo3-d-d4s.d4science.org:27017], passives=[mongo4-d-d4s.d4science.org:27017], arbiters=[mongo1-d-d4s.d4science.org:27017], primary='mongo3-d-d4s.d4science.org:27017', tagSet=TagSet{[]}} +2016-04-08 12:39:35 INFO cluster:71 - Discovered cluster type of REPLICA_SET +2016-04-08 12:39:35 INFO cluster:71 - Adding discovered server mongo4-d-d4s.d4science.org:27017 to client view of cluster +2016-04-08 12:39:35 INFO cluster:71 - Adding discovered server mongo1-d-d4s.d4science.org:27017 to client view of cluster +2016-04-08 12:39:35 DEBUG cluster:56 - Updating cluster description to {type=REPLICA_SET, servers=[{address=mongo1-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}, {address=mongo2-d-d4s.d4science.org:27017, type=REPLICA_SET_OTHER, roundTripTime=1.4 ms, state=CONNECTED}, {address=mongo3-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}, {address=mongo4-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}] +2016-04-08 12:39:35 INFO connection:71 - Opened connection [connectionId{localValue:19, serverValue:1714977}] to mongo1-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG cluster:56 - Checking status of mongo1-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 INFO cluster:71 - Monitor thread 
successfully connected to server with description ServerDescription{address=mongo1-d-d4s.d4science.org:27017, type=REPLICA_SET_ARBITER, state=CONNECTED, ok=true, version=ServerVersion{versionList=[3, 0, 8]}, minWireVersion=0, maxWireVersion=3, electionId=null, maxDocumentSize=16777216, roundTripTimeNanos=927833, setName='storagedev', canonicalAddress=mongo1-d-d4s.d4science.org:27017, hosts=[mongo2-d-d4s.d4science.org:27017, mongo3-d-d4s.d4science.org:27017], passives=[mongo4-d-d4s.d4science.org:27017], arbiters=[mongo1-d-d4s.d4science.org:27017], primary='mongo3-d-d4s.d4science.org:27017', tagSet=TagSet{[]}} +2016-04-08 12:39:35 DEBUG cluster:56 - Updating cluster description to {type=REPLICA_SET, servers=[{address=mongo1-d-d4s.d4science.org:27017, type=REPLICA_SET_ARBITER, roundTripTime=0.9 ms, state=CONNECTED}, {address=mongo2-d-d4s.d4science.org:27017, type=REPLICA_SET_OTHER, roundTripTime=1.4 ms, state=CONNECTED}, {address=mongo3-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}, {address=mongo4-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}] +2016-04-08 12:39:35 INFO connection:71 - Opened connection [connectionId{localValue:17, serverValue:1377299}] to mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG cluster:56 - Checking status of mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 INFO cluster:71 - Monitor thread successfully connected to server with description ServerDescription{address=mongo3-d-d4s.d4science.org:27017, type=REPLICA_SET_PRIMARY, state=CONNECTED, ok=true, version=ServerVersion{versionList=[3, 0, 8]}, minWireVersion=0, maxWireVersion=3, electionId=56c481dca32e5d3f9711532d, maxDocumentSize=16777216, roundTripTimeNanos=1130971, setName='storagedev', canonicalAddress=mongo3-d-d4s.d4science.org:27017, hosts=[mongo2-d-d4s.d4science.org:27017, mongo3-d-d4s.d4science.org:27017], passives=[mongo4-d-d4s.d4science.org:27017], arbiters=[mongo1-d-d4s.d4science.org:27017], primary='mongo3-d-d4s.d4science.org:27017', 
tagSet=TagSet{[]}} +2016-04-08 12:39:35 INFO cluster:71 - Discovered replica set primary mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG cluster:56 - Updating cluster description to {type=REPLICA_SET, servers=[{address=mongo1-d-d4s.d4science.org:27017, type=REPLICA_SET_ARBITER, roundTripTime=0.9 ms, state=CONNECTED}, {address=mongo2-d-d4s.d4science.org:27017, type=REPLICA_SET_OTHER, roundTripTime=1.4 ms, state=CONNECTED}, {address=mongo3-d-d4s.d4science.org:27017, type=REPLICA_SET_PRIMARY, roundTripTime=1.1 ms, state=CONNECTED}, {address=mongo4-d-d4s.d4science.org:27017, type=UNKNOWN, state=CONNECTING}] +2016-04-08 12:39:35 INFO connection:71 - Opened connection [connectionId{localValue:18, serverValue:1125562}] to mongo4-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG cluster:56 - Checking status of mongo4-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 INFO cluster:71 - Monitor thread successfully connected to server with description ServerDescription{address=mongo4-d-d4s.d4science.org:27017, type=REPLICA_SET_SECONDARY, state=CONNECTED, ok=true, version=ServerVersion{versionList=[3, 0, 7]}, minWireVersion=0, maxWireVersion=3, electionId=null, maxDocumentSize=16777216, roundTripTimeNanos=1266916, setName='storagedev', canonicalAddress=mongo4-d-d4s.d4science.org:27017, hosts=[mongo2-d-d4s.d4science.org:27017, mongo3-d-d4s.d4science.org:27017], passives=[mongo4-d-d4s.d4science.org:27017], arbiters=[mongo1-d-d4s.d4science.org:27017], primary='mongo3-d-d4s.d4science.org:27017', tagSet=TagSet{[]}} +2016-04-08 12:39:35 DEBUG cluster:56 - Updating cluster description to {type=REPLICA_SET, servers=[{address=mongo1-d-d4s.d4science.org:27017, type=REPLICA_SET_ARBITER, roundTripTime=0.9 ms, state=CONNECTED}, {address=mongo2-d-d4s.d4science.org:27017, type=REPLICA_SET_OTHER, roundTripTime=1.4 ms, state=CONNECTED}, {address=mongo3-d-d4s.d4science.org:27017, type=REPLICA_SET_PRIMARY, roundTripTime=1.1 ms, state=CONNECTED}, 
{address=mongo4-d-d4s.d4science.org:27017, type=REPLICA_SET_SECONDARY, roundTripTime=1.3 ms, state=CONNECTED}] +2016-04-08 12:39:35 INFO connection:71 - Opened connection [connectionId{localValue:20, serverValue:1377300}] to mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG command:56 - Sending command {createIndexes : BsonString{value='fs.files'}} to database remotefs on connection [connectionId{localValue:20, serverValue:1377300}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG command:56 - Command execution completed +2016-04-08 12:39:35 DEBUG command:56 - Sending command {createIndexes : BsonString{value='fs.files'}} to database remotefs on connection [connectionId{localValue:20, serverValue:1377300}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG command:56 - Command execution completed +2016-04-08 12:39:35 DEBUG command:56 - Sending command {createIndexes : BsonString{value='fs.files'}} to database remotefs on connection [connectionId{localValue:20, serverValue:1377300}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG command:56 - Command execution completed +2016-04-08 12:39:35 DEBUG DefaultMongoClient:1405 - MongoDB - pathServer: /gcube/home/org.gcube.portlets.user/test-home-library/Home/giancarlo.panichi/Workspace/StatisticalTest/DBScan/hcaf_filtered.csv +2016-04-08 12:39:35 INFO MongoIO:137 - MongoDB - retrieve object from pathServer: /gcube/home/org.gcube.portlets.user/test-home-library/Home/giancarlo.panichi/Workspace/StatisticalTest/DBScan/hcaf_filtered.csv +2016-04-08 12:39:35 DEBUG command:56 - Sending command {count : BsonString{value='fs.files'}} to database remotefs on connection [connectionId{localValue:20, serverValue:1377300}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG command:56 - Command execution completed +2016-04-08 12:39:35 DEBUG command:56 - Sending command {count : BsonString{value='fs.chunks'}} to database remotefs on connection 
[connectionId{localValue:20, serverValue:1377300}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG command:56 - Command execution completed +2016-04-08 12:39:35 INFO MongoIO:168 - remote object is not a validID : /gcube/home/org.gcube.portlets.user/test-home-library/Home/giancarlo.panichi/Workspace/StatisticalTest/DBScan/hcaf_filtered.csv +2016-04-08 12:39:35 DEBUG query:56 - Sending query of namespace remotefs.fs.files on connection [connectionId{localValue:20, serverValue:1377300}] to server mongo3-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG query:56 - Query completed +2016-04-08 12:39:35 INFO MongoIO:186 - object found hcaf_filtered.csv +2016-04-08 12:39:35 DEBUG connection:56 - Closing connection connectionId{localValue:19, serverValue:1714977} +2016-04-08 12:39:35 DEBUG connection:56 - Closing connection connectionId{localValue:16, serverValue:1094346} +2016-04-08 12:39:35 INFO connection:71 - Closed connection [connectionId{localValue:20, serverValue:1377300}] to mongo3-d-d4s.d4science.org:27017 because the pool has been closed. 
+2016-04-08 12:39:35 DEBUG connection:56 - Closing connection connectionId{localValue:20, serverValue:1377300} +2016-04-08 12:39:35 DEBUG connection:56 - Closing connection connectionId{localValue:18, serverValue:1125562} +2016-04-08 12:39:35 INFO MongoIO:508 - Mongo has been closed +2016-04-08 12:39:35 DEBUG connection:56 - Closing connection connectionId{localValue:17, serverValue:1377299} +2016-04-08 12:39:35 DEBUG Operation:81 - PATH /gcube/home/org.gcube.portlets.user/test-home-library/Home/giancarlo.panichi/Workspace/StatisticalTest/DBScan/hcaf_filtered.csv +2016-04-08 12:39:35 INFO Operation:55 - URL generated: smp://data-d.d4science.org/8Yjj+HQc0gLVW2bfDjeAkOhqzg9XDTeiGmbP5+HKCzc= +2016-04-08 12:39:35 INFO Operation:57 - URL generated: smp://data-d.d4science.org/8Yjj+HQc0gLVW2bfDjeAkOhqzg9XDTeiGmbP5+HKCzc= +2016-04-08 12:39:35 DEBUG Operation:87 - translating: http://data-d.d4science.org/8Yjj+HQc0gLVW2bfDjeAkOhqzg9XDTeiGmbP5+HKCzc= +2016-04-08 12:39:35 DEBUG Operation:90 - base Url extracted is: http://data-d.d4science.org/ +2016-04-08 12:39:35 DEBUG Operation:93 - get params: http://data-d.d4science.org/ 8Yjj+HQc0gLVW2bfDjeAkOhqzg9XDTeiGmbP5+HKCzc= +2016-04-08 12:39:35 INFO Operation:99 - uri translated in http url: http://data-d.d4science.org/OFlqaitIUWMwZ0xWVzJiZkRqZUFrT2hxemc5WERUZWlHbWJQNStIS0N6Yz0 +2016-04-08 12:39:35 INFO Operation:65 - URL translated: http://data-d.d4science.org/OFlqaitIUWMwZ0xWVzJiZkRqZUFrT2hxemc5WERUZWlHbWJQNStIS0N6Yz0 +2016-04-08 12:39:35 DEBUG cluster:56 - Checking status of mongo2-d-d4s.d4science.org:27017 +2016-04-08 12:39:35 DEBUG cluster:56 - Updating cluster description to {type=REPLICA_SET, servers=[{address=mongo1-d-d4s.d4science.org:27017, type=REPLICA_SET_ARBITER, roundTripTime=1.0 ms, state=CONNECTED}, {address=mongo2-d-d4s.d4science.org:27017, type=REPLICA_SET_OTHER, roundTripTime=1.5 ms, state=CONNECTED}, {address=mongo3-d-d4s.d4science.org:27017, type=REPLICA_SET_PRIMARY, roundTripTime=1.6 ms, state=CONNECTED}, 
{address=mongo4-d-d4s.d4science.org:27017, type=REPLICA_SET_SECONDARY, roundTripTime=1.4 ms, state=CONNECTED}] +2016-04-08 12:39:36 DEBUG connection:56 - Closing connection connectionId{localValue:13, serverValue:1714976} +2016-04-08 12:39:36 DEBUG connection:56 - Closing connection connectionId{localValue:14, serverValue:1125561} +2016-04-08 12:39:36 INFO connection:71 - Closed connection [connectionId{localValue:15, serverValue:1377298}] to mongo3-d-d4s.d4science.org:27017 because the pool has been closed. +2016-04-08 12:39:36 DEBUG connection:56 - Closing connection connectionId{localValue:11, serverValue:1094345} +2016-04-08 12:39:36 DEBUG connection:56 - Closing connection connectionId{localValue:15, serverValue:1377298} +2016-04-08 12:39:36 DEBUG connection:56 - Closing connection connectionId{localValue:12, serverValue:1377297} +2016-04-08 12:39:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 12:39:59 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 12:40:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:40:54 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 12:41:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:41:49 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 12:42:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:42:02 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 12:42:02 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:42:02 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:42:02 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 12:42:02 DEBUG SClient4WPS:284 - + 
org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 12:42:02 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 12:42:02 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 12:42:02 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 12:42:02 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 12:42:02 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 12:42:02 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 12:42:02 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 12:42:02 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 12:42:02 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 12:42:02 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:42:02 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 12:42:02 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 12:42:02 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 12:42:02 DEBUG WPS2SM:201 - Schema: null +2016-04-08 12:42:02 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 12:42:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 12:42:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 12:42:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:42:02 DEBUG SClient4WPS:680 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 12:42:02 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 12:42:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:42:02 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:42:02 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 12:42:02 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 12:42:02 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 12:42:02 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 12:42:02 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 12:42:02 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 12:42:02 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 12:42:02 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 12:42:02 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 12:42:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 12:42:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 12:42:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:42:02 DEBUG SClient4WPS:680 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 12:42:02 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 12:42:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:42:02 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 12:42:02 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 12:42:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 12:42:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 12:42:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:42:02 DEBUG SClient4WPS:680 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 12:42:02 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 12:42:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:42:02 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:42:02 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 12:42:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 12:42:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 12:42:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:42:02 DEBUG SClient4WPS:680 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 12:42:02 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 12:42:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 12:42:02 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 12:42:02 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 12:42:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 12:42:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 12:42:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 12:42:02 DEBUG SClient4WPS:680 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 12:42:02 DEBUG SClient4WPS:688 - UserInputs= key:OccurrencePointsTable, value=http://data-d.d4science.org/OFlqaitIUWMwZ0xWVzJiZkRqZUFrT2hxemc5WERUZWlHbWJQNStIS0N6Yz0 +2016-04-08 12:42:02 DEBUG SClient4WPS:688 - UserInputs= key:FeaturesColumnNames, value=depthsd +2016-04-08 12:42:02 DEBUG SClient4WPS:688 - UserInputs= key:OccurrencePointsClusterLabel, value=OccCluster_ +2016-04-08 12:42:02 DEBUG SClient4WPS:688 - UserInputs= key:epsilon, value=10 +2016-04-08 12:42:02 DEBUG SClient4WPS:688 - UserInputs= key:min_points, value=1 +2016-04-08 12:42:02 DEBUG SClient4WPS:747 - Configuring Complex: OccurrencePointsTable to: http://data-d.d4science.org/OFlqaitIUWMwZ0xWVzJiZkRqZUFrT2hxemc5WERUZWlHbWJQNStIS0N6Yz0 +2016-04-08 12:42:02 DEBUG SClient4WPS:730 - Configuring Literal: FeaturesColumnNames to: depthsd +2016-04-08 12:42:02 DEBUG SClient4WPS:730 - Configuring Literal: OccurrencePointsClusterLabel to: OccCluster_ +2016-04-08 12:42:02 DEBUG SClient4WPS:730 - Configuring Literal: epsilon to: 10 +2016-04-08 12:42:02 DEBUG SClient4WPS:730 - Configuring Literal: min_points to: 1 
+2016-04-08 12:42:02 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 12:42:03 DEBUG SClient4WPS:379 - Sending: + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + + + OccurrencePointsTable + + + + FeaturesColumnNames + + depthsd + + + + OccurrencePointsClusterLabel + + OccCluster_ + + + + epsilon + + 10 + + + + min_points + + 1 + + + + + + + OutputTable + + + non_deterministic_output + + + + +2016-04-08 12:42:03 DEBUG StatWPSClientSession:420 - RetrieveDataViaPost(): http://dataminer1-d-d4s.d4science.org:80/wps/WebProcessingService? +2016-04-08 12:42:03 DEBUG SClient4WPS:398 - Retrieved ProcessLocation: http://dataminer1-d-d4s.d4science.org:80/wps/RetrieveResultServlet?id=6dbdfe91-1d5e-45f4-bea2-e4c295fac388 +2016-04-08 12:42:03 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 12:42:03 DEBUG SClient4WPS:767 - Starting Process: http://dataminer1-d-d4s.d4science.org:80/wps/RetrieveResultServlet?id=6dbdfe91-1d5e-45f4-bea2-e4c295fac388 +2016-04-08 12:42:03 DEBUG SClient4WPS:694 - Stated Computation ProcessLocation:http://dataminer1-d-d4s.d4science.org:80/wps/RetrieveResultServlet?id=6dbdfe91-1d5e-45f4-bea2-e4c295fac388 +2016-04-08 12:42:03 DEBUG SClient4WPS:705 - ComputationId: ComputationId [id=6dbdfe91-1d5e-45f4-bea2-e4c295fac388, urlId=http://dataminer1-d-d4s.d4science.org:80/wps/RetrieveResultServlet?id=6dbdfe91-1d5e-45f4-bea2-e4c295fac388] +2016-04-08 12:42:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:42:13 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:42:13 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 12:42:13 DEBUG SClient4WPS:783 - GetComputationStatus(): ComputationId=ComputationId [id=6dbdfe91-1d5e-45f4-bea2-e4c295fac388, 
urlId=http://dataminer1-d-d4s.d4science.org:80/wps/RetrieveResultServlet?id=6dbdfe91-1d5e-45f4-bea2-e4c295fac388] +2016-04-08 12:42:13 INFO StatWPSClientSession:114 - CONNECT +2016-04-08 12:42:13 WARN StatWPSClientSession:120 - retrieving caps failed, caps are null +2016-04-08 12:42:13 DEBUG StatWPSClientSession:554 - ExecuteViaGet() Url: http://dataminer1-d-d4s.d4science.org:80/wps/RetrieveResultServlet?id=6dbdfe91-1d5e-45f4-bea2-e4c295fac388 +2016-04-08 12:42:13 DEBUG StatWPSClientSession:558 - auth string: giancarlo.panichi:f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 12:42:13 DEBUG StatWPSClientSession:561 - Base64 encoded auth string: Z2lhbmNhcmxvLnBhbmljaGk6ZjA2NjY1OTctNDMwMi00OWNlLWJlYTItNTU1Yjk0ZTU2OWNi +2016-04-08 12:42:13 DEBUG StatWPSClientSession:576 - ExecuteAsGETString as Document +2016-04-08 12:42:13 DEBUG SClient4WPS:799 - ComputationStatus ResponseObject: + + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + + + + + + java.lang.RuntimeException: DBScan: Error incomplete parameters + + + + + +2016-04-08 12:42:13 DEBUG SClient4WPS:844 - WPS FAILURE: + + java.lang.RuntimeException: DBScan: Error incomplete parameters + + OR PAUSED: null +2016-04-08 12:42:13 DEBUG SClient4WPS:882 - ComputationStatus: ComputationStatus [percentage=100.0, status=FAILED, endDate=null, message=null, errResource=null] +2016-04-08 12:42:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:42:44 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:43:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:43:39 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:44:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 12:44:34 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 12:45:29 DEBUG DefaultScopeProvider:38 - 
setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:45:29 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:46:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:46:24 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:47:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 12:47:19 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 12:48:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:48:14 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:49:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:49:09 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:50:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:50:04 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:50:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:50:59 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:51:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:51:54 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:52:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 12:52:49 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 12:53:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:53:44 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:54:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:54:39 DEBUG ASLSession:458 - Getting security token: null in 
thread 33 +2016-04-08 12:55:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 12:55:34 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 12:56:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 12:56:29 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 12:57:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:57:24 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:58:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 12:58:19 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 12:59:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 12:59:14 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 13:00:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 13:00:09 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 13:01:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 13:01:04 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 13:01:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 13:01:59 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 13:02:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 13:02:54 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 13:03:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 13:03:49 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 13:04:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 
13:04:44 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 13:05:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 13:05:39 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 13:06:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 13:06:34 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 13:07:29 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 13:07:29 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 13:07:29 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 13:07:29 WARN SessionCheckerServiceImpl:80 - Scope is null at Fri Apr 08 13:07:29 CEST 2016 +2016-04-08 13:07:29 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 13:09:57 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 13:09:57 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 13:09:57 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-08 13:09:57 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 13:09:57 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 13:09:57 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 13:09:57 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 13:09:57 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@3ec68e5a +2016-04-08 13:09:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 13:09:57 INFO ASLSession:352 - Logging the entrance +2016-04-08 13:09:57 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 13:09:57 DEBUG TemplateModel:83 - 2016-04-08 13:09:57, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 13:09:57 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 13:09:57 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 13:10:05 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 13:10:05 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 13:10:05 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 13:10:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 13:10:05 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 13:10:05 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 13:10:05 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 13:10:05 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 113 ms +2016-04-08 
13:10:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 13:10:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 13:10:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 13:10:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 13:10:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 13:10:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 13:10:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 13:10:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 13:10:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 13:10:05 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 13:10:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 13:10:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 13:10:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 13:10:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 13:10:05 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 13:10:05 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:10:05 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 13:10:05 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@b814146 +2016-04-08 13:10:05 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@78d58a16 +2016-04-08 13:10:05 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@34c16377 +2016-04-08 13:10:05 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@57ab0a96 +2016-04-08 13:10:05 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 111 ms +2016-04-08 13:10:05 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 13:10:05 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 13:10:05 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 13:10:05 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 13:10:05 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 13:10:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 13:10:05 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:10:05 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 13:10:06 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 26 ms +2016-04-08 13:10:06 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 13:10:06 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 13:10:06 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 13:10:06 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 13:10:07 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 13:10:07 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 13:10:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 13:10:09 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 13:10:09 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 13:10:09 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 13:10:09 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 13:10:09 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 13:10:10 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 13:10:10 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 13:10:10 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 13:10:10 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 13:10:10 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 13:10:10 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 13:10:10 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 13:10:10 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 13:10:10 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 13:10:10 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 13:10:10 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 13:10:10 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 13:10:10 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 13:10:10 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 13:10:10 DEBUG WPS2SM:201 - Schema: null +2016-04-08 13:10:10 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 13:10:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 13:10:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 13:10:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 13:10:10 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 13:10:10 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 13:10:10 DEBUG WPS2SM:93 - WPS type: +2016-04-08 13:10:10 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 13:10:10 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 13:10:10 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 13:10:10 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 13:10:10 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 13:10:10 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 13:10:10 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 13:10:10 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 13:10:10 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 13:10:10 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 13:10:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 13:10:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 13:10:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 13:10:10 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 13:10:10 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 13:10:10 DEBUG WPS2SM:93 - WPS type: +2016-04-08 13:10:10 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 13:10:10 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 13:10:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 13:10:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 13:10:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 13:10:10 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 13:10:10 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 13:10:10 DEBUG WPS2SM:93 - WPS type: +2016-04-08 13:10:10 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 13:10:10 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 13:10:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 13:10:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 13:10:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 13:10:10 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 13:10:10 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 13:10:10 DEBUG WPS2SM:93 - WPS type: +2016-04-08 13:10:10 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 13:10:10 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 13:10:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 13:10:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 13:10:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 13:10:10 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 13:10:10 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 13:10:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 13:10:10 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 13:10:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 13:10:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 13:10:10 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 13:10:10 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 13:10:10 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 13:10:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 13:10:10 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 13:10:10 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 13:10:10 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 13:10:10 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 13:10:10 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 13:10:10 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 13:10:10 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 13:10:10 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 13:10:10 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 13:10:10 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 13:10:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 13:10:10 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:10:10 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 13:10:10 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 82 ms +2016-04-08 13:10:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 13:10:10 DEBUG ASLSession:458 - Getting security token: null in thread 35 
+2016-04-08 13:10:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 13:10:10 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 13:10:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 13:10:10 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 13:10:10 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 13:10:10 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 13:10:10 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 13:10:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 13:10:10 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 13:10:10 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 13:10:10 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 13:10:10 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 13:10:10 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 13:10:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 13:10:10 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 13:10:10 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 13:10:10 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 13:10:10 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 13:10:10 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 13:10:10 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 13:10:10 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 13:10:10 DEBUG 
JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 13:10:10 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 13:10:10 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 13:10:10 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 13:10:10 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 13:10:10 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 13:10:10 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 13:10:11 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 13:10:11 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 13:10:11 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 13:10:11 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 13:10:11 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 13:10:11 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 13:10:11 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 13:10:11 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 13:10:11 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 13:10:11 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 13:10:11 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 13:10:11 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 13:10:11 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 13:10:11 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 13:10:11 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 13:10:11 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:10:11 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:10:11 INFO JCRWorkspace:315 - Getting Workspace of user: 
giancarlo.panichi +2016-04-08 13:10:11 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 13:10:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:10:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:10:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:10:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 13:10:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 13:10:11 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 13:10:11 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:10:11 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:10:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 13:10:11 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 13:10:11 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:10:11 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:10:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:10:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:10:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:10:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:10:11 INFO JCRServlets:142 - Calling servlet 
getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 13:10:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 13:10:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 13:10:11 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:10:11 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 13:10:11 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 21 ms +2016-04-08 13:10:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 13:10:11 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:10:11 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 13:10:11 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 
'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 18 ms +2016-04-08 13:10:11 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-08 13:10:11 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 13:10:11 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 13:10:11 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 13:10:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 13:10:11 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 13:10:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 13:10:11 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:10:11 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 13:10:12 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 19 ms +2016-04-08 13:10:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 13:10:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 13:10:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 13:10:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 13:10:12 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 13:10:12 DEBUG ItemBuilder:108 - Is 
shared folder: Cotrix test +2016-04-08 13:10:12 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 13:10:12 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 13:10:12 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 13:10:12 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 13:10:12 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 13:10:12 INFO WorkspaceExplorerServiceImpl:142 - end time - 491 msc 0 sec +2016-04-08 13:10:12 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 13:11:09 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 13:11:09 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 13:11:09 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-08 13:11:09 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 13:11:09 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 13:11:09 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 13:11:09 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 13:11:09 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@7138aa33 +2016-04-08 13:11:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 13:11:09 INFO ASLSession:352 - Logging the entrance +2016-04-08 13:11:09 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 13:11:09 DEBUG TemplateModel:83 - 2016-04-08 13:11:09, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 13:11:09 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 13:11:09 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 13:11:12 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 13:11:12 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 13:11:12 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 13:11:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 13:11:12 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 13:11:12 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 13:11:13 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 13:11:13 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 135 ms +2016-04-08 
13:11:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 13:11:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 13:11:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 13:11:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 13:11:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 13:11:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 13:11:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 13:11:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 13:11:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 13:11:13 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 13:11:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 13:11:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 13:11:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 13:11:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 13:11:13 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 13:11:13 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:11:13 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 13:11:13 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@1e816764 +2016-04-08 13:11:13 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@483af90 +2016-04-08 13:11:13 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@e1b1e1d +2016-04-08 13:11:13 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@216f7813 +2016-04-08 13:11:13 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 120 ms +2016-04-08 13:11:13 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 13:11:13 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 13:11:13 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 13:11:13 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 13:11:13 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 13:11:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 13:11:13 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:11:13 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 13:11:13 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 31 ms +2016-04-08 13:11:13 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 13:11:13 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 13:11:13 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 13:11:13 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 13:11:14 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 13:11:14 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 13:11:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 13:11:18 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 13:11:18 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 13:11:18 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 13:11:18 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 13:11:18 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 13:11:19 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 13:11:19 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 13:11:19 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 13:11:19 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 13:11:19 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 13:11:19 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 13:11:19 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 13:11:19 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 13:11:19 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 13:11:19 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 13:11:19 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 13:11:19 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 13:11:19 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 13:11:19 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 13:11:19 DEBUG WPS2SM:201 - Schema: null +2016-04-08 13:11:19 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 13:11:19 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 13:11:19 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 13:11:19 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 13:11:19 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 13:11:19 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 13:11:19 DEBUG WPS2SM:93 - WPS type: +2016-04-08 13:11:19 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 13:11:19 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 13:11:19 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 13:11:19 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 13:11:19 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 13:11:19 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 13:11:19 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 13:11:19 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 13:11:19 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 13:11:19 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 13:11:19 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 13:11:19 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 13:11:19 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 13:11:19 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 13:11:19 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 13:11:19 DEBUG WPS2SM:93 - WPS type: +2016-04-08 13:11:19 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 13:11:19 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 13:11:19 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 13:11:19 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 13:11:19 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 13:11:19 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 13:11:19 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 13:11:19 DEBUG WPS2SM:93 - WPS type: +2016-04-08 13:11:19 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 13:11:19 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 13:11:19 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 13:11:19 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 13:11:19 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 13:11:19 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 13:11:19 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 13:11:19 DEBUG WPS2SM:93 - WPS type: +2016-04-08 13:11:19 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 13:11:19 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 13:11:19 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 13:11:19 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 13:11:19 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 13:11:19 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 13:11:19 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 13:11:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 13:11:19 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 13:11:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 13:11:19 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 13:11:19 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 13:11:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 13:11:19 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 13:11:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 13:11:19 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 13:11:19 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 13:11:19 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 13:11:19 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 13:11:19 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 13:11:19 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 13:11:19 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 13:11:19 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 13:11:19 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 13:11:19 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 13:11:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 13:11:19 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:11:19 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 13:11:19 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 120 ms +2016-04-08 13:11:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 13:11:19 DEBUG ASLSession:458 - Getting security token: null in thread 34 
+2016-04-08 13:11:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 13:11:19 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 13:11:19 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 13:11:19 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 13:11:19 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 13:11:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 13:11:19 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 13:11:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 13:11:19 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 13:11:19 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 13:11:19 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 13:11:19 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 13:11:19 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 13:11:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 13:11:19 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 13:11:19 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 13:11:19 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. 
+2016-04-08 13:11:19 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 13:11:19 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 13:11:19 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 13:11:19 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 13:11:19 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 13:11:19 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 13:11:19 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 13:11:19 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 13:11:19 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 13:11:19 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 13:11:19 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 13:11:20 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 13:11:20 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 13:11:20 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 13:11:20 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 13:11:20 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 13:11:20 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 13:11:20 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 13:11:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 13:11:20 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 13:11:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 13:11:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 13:11:20 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 13:11:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi 
+2016-04-08 13:11:20 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 13:11:20 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 13:11:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:11:20 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 13:11:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:11:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:11:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:11:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:11:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:11:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 13:11:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:11:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 13:11:20 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 13:11:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:11:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 13:11:20 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 13:11:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:11:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:11:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:11:20 INFO 
JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:11:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:11:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:11:20 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 13:11:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 13:11:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 13:11:20 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:11:20 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 13:11:20 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 30 ms +2016-04-08 13:11:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 13:11:20 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:11:20 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where 
($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 13:11:20 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 13:11:20 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-08 13:11:20 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 13:11:20 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 13:11:20 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 13:11:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 13:11:20 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 13:11:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 13:11:20 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:11:20 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 13:11:20 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-08 13:11:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube in 
thread 34 +2016-04-08 13:11:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 13:11:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 13:11:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 13:11:20 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 13:11:20 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 13:11:20 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 13:11:20 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 13:11:20 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 13:11:20 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 13:11:20 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 13:11:20 INFO WorkspaceExplorerServiceImpl:142 - end time - 442 msc 0 sec +2016-04-08 13:11:20 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 13:12:24 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 13:12:24 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 13:12:24 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-08 13:12:24 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 13:12:24 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 13:12:24 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 13:12:24 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 13:12:24 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@5d7497c9 +2016-04-08 13:12:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 13:12:24 INFO ASLSession:352 - Logging the entrance +2016-04-08 13:12:24 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 13:12:24 DEBUG TemplateModel:83 - 2016-04-08 13:12:24, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 13:12:24 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 13:12:24 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 13:12:30 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 13:12:30 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 13:12:30 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 13:12:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 13:12:30 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 13:12:30 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 13:12:30 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 13:12:30 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 133 ms +2016-04-08 13:12:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 13:12:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 13:12:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 13:12:30 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 13:12:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 13:12:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 13:12:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 13:12:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 13:12:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 13:12:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 13:12:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 13:12:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 13:12:30 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 13:12:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 13:12:30 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 13:12:30 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:12:30 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 13:12:30 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@28871950 +2016-04-08 13:12:30 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@5bc79e64 +2016-04-08 13:12:30 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@685c09bb +2016-04-08 13:12:30 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@2aa45d6e +2016-04-08 13:12:30 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 105 ms +2016-04-08 13:12:30 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 13:12:31 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 13:12:31 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 13:12:31 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 13:12:31 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 13:12:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 13:12:31 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:12:31 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 13:12:31 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 27 ms +2016-04-08 13:12:31 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 13:12:31 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 13:12:31 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 13:12:31 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 13:12:31 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 13:12:31 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 13:12:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 13:12:35 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 13:12:35 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 13:12:35 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 13:12:35 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 13:12:35 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 13:12:35 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 13:12:35 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 13:12:35 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 13:12:35 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 13:12:35 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 13:12:35 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 13:12:35 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 13:12:35 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 13:12:35 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 13:12:35 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 13:12:35 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 13:12:35 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 13:12:35 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 13:12:35 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 13:12:35 DEBUG WPS2SM:201 - Schema: null +2016-04-08 13:12:35 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 13:12:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 13:12:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 13:12:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 13:12:35 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 13:12:35 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 13:12:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 13:12:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 13:12:35 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 13:12:35 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 13:12:35 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 13:12:35 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 13:12:35 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 13:12:35 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 13:12:35 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 13:12:35 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 13:12:35 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 13:12:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 13:12:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 13:12:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 13:12:35 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 13:12:35 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 13:12:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 13:12:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 13:12:35 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 13:12:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 13:12:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 13:12:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 13:12:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 13:12:35 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 13:12:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 13:12:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 13:12:35 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 13:12:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 13:12:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 13:12:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 13:12:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 13:12:35 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 13:12:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 13:12:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 13:12:35 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 13:12:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 13:12:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 13:12:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 13:12:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 13:12:35 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 13:12:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 13:12:35 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 13:12:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 13:12:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 13:12:35 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 13:12:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 13:12:35 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 13:12:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 13:12:35 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 13:12:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 13:12:35 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 13:12:35 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 13:12:35 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 13:12:35 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 13:12:35 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 13:12:35 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 13:12:35 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 13:12:35 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 13:12:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 13:12:35 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:12:35 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 13:12:36 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 78 ms +2016-04-08 13:12:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 13:12:36 DEBUG ASLSession:458 - Getting security token: null in thread 30 
+2016-04-08 13:12:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 13:12:36 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 13:12:36 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 13:12:36 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 13:12:36 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 13:12:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 13:12:36 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 13:12:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 13:12:36 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 13:12:36 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 13:12:36 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 13:12:36 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 13:12:36 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 13:12:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 13:12:36 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 13:12:36 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 13:12:36 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 13:12:36 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 13:12:36 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 13:12:36 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 13:12:36 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 13:12:36 INFO 
JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 13:12:36 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 13:12:36 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 13:12:36 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 13:12:36 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 13:12:36 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 13:12:36 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 13:12:36 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 13:12:36 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 13:12:36 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 13:12:36 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 13:12:36 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 13:12:36 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 13:12:36 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 13:12:36 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 13:12:36 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 13:12:36 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 13:12:36 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 13:12:36 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 13:12:36 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 13:12:36 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 13:12:36 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 13:12:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:12:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:12:36 DEBUG 
JCRHomeManager:97 - User is already logged +2016-04-08 13:12:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:12:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:12:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:12:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:12:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 13:12:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:12:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 13:12:36 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 13:12:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:12:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 13:12:36 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 13:12:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:12:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:12:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:12:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:12:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:12:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:12:36 INFO JCRServlets:142 - Calling servlet getChildrenById 
efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 13:12:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 13:12:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 13:12:37 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:12:37 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 13:12:37 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 30 ms +2016-04-08 13:12:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 13:12:37 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:12:37 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 13:12:37 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and 
$resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 13:12:37 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-08 13:12:37 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 13:12:37 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 13:12:37 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 13:12:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 13:12:37 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 13:12:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 13:12:37 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:12:37 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 13:12:37 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 15 ms +2016-04-08 13:12:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 13:12:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 13:12:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 13:12:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 13:12:37 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 13:12:37 DEBUG ItemBuilder:108 - Is shared folder: 
Cotrix test +2016-04-08 13:12:37 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 13:12:37 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 13:12:37 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 13:12:37 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 13:12:37 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 13:12:37 INFO WorkspaceExplorerServiceImpl:142 - end time - 444 msc 0 sec +2016-04-08 13:12:37 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 13:13:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 13:13:19 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 13:14:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 13:14:14 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 13:15:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 13:15:09 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 13:16:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 13:16:04 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 13:16:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 13:16:59 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 13:17:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 13:17:54 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 13:18:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 13:18:49 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 13:21:59 DEBUG AccessLogger:124 - Creating a message handling object in order to 
handle the message queue +2016-04-08 13:21:59 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 13:21:59 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-08 13:21:59 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 13:21:59 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 13:21:59 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 13:21:59 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 13:21:59 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@1ca260bb +2016-04-08 13:21:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 13:21:59 INFO ASLSession:352 - Logging the entrance +2016-04-08 13:21:59 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 13:21:59 DEBUG TemplateModel:83 - 2016-04-08 13:21:59, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 13:21:59 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 13:21:59 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 13:22:04 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 13:22:04 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 13:22:04 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 13:22:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 13:22:04 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 13:22:04 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 13:22:04 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 13:22:04 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 132 ms +2016-04-08 13:22:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 13:22:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 13:22:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 13:22:04 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 13:22:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 13:22:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 13:22:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 13:22:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 13:22:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 13:22:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 13:22:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 13:22:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 13:22:04 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 13:22:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 13:22:05 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 13:22:05 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:22:05 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 13:22:05 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@21b3c70c +2016-04-08 13:22:05 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@295f6122 +2016-04-08 13:22:05 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@1da86f20 +2016-04-08 13:22:05 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@60c6b1e2 +2016-04-08 13:22:05 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 260 ms +2016-04-08 13:22:05 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 13:22:05 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 13:22:05 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 13:22:05 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 13:22:05 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 13:22:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 13:22:05 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:22:05 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 13:22:06 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 26 ms +2016-04-08 13:22:06 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 13:22:06 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 13:22:06 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 13:22:06 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 13:22:07 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 13:22:07 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 13:22:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 13:22:10 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 13:22:10 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 13:22:10 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 13:22:10 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 13:22:10 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 13:22:10 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 13:22:10 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 13:22:10 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 13:22:10 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 13:22:10 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 13:22:10 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 13:22:10 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 13:22:10 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 13:22:10 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 13:22:10 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 13:22:10 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 13:22:10 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 13:22:10 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 13:22:10 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 13:22:10 DEBUG WPS2SM:201 - Schema: null +2016-04-08 13:22:10 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 13:22:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 13:22:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 13:22:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 13:22:10 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 13:22:10 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 13:22:10 DEBUG WPS2SM:93 - WPS type: +2016-04-08 13:22:10 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 13:22:10 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 13:22:10 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 13:22:10 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 13:22:10 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 13:22:10 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 13:22:10 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 13:22:10 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 13:22:10 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 13:22:10 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 13:22:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 13:22:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 13:22:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 13:22:10 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 13:22:10 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 13:22:10 DEBUG WPS2SM:93 - WPS type: +2016-04-08 13:22:10 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 13:22:10 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 13:22:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 13:22:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 13:22:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 13:22:10 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 13:22:10 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 13:22:10 DEBUG WPS2SM:93 - WPS type: +2016-04-08 13:22:10 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 13:22:10 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 13:22:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 13:22:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 13:22:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 13:22:10 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 13:22:10 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 13:22:10 DEBUG WPS2SM:93 - WPS type: +2016-04-08 13:22:10 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 13:22:10 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 13:22:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 13:22:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 13:22:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 13:22:10 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 13:22:10 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 13:22:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 13:22:10 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 13:22:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 13:22:10 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 13:22:10 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 13:22:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 13:22:10 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 13:22:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 13:22:10 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 13:22:10 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 13:22:10 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 13:22:10 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 13:22:10 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 13:22:10 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 13:22:10 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 13:22:10 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 13:22:10 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 13:22:10 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 13:22:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 13:22:10 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:22:10 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 13:22:11 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 83 ms +2016-04-08 13:22:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 13:22:11 DEBUG ASLSession:458 - Getting security token: null in thread 34 
+2016-04-08 13:22:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 13:22:11 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 13:22:11 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 13:22:11 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 13:22:11 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 13:22:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 13:22:11 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 13:22:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 13:22:11 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 13:22:11 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 13:22:11 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 13:22:11 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 13:22:11 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 13:22:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 13:22:11 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 13:22:11 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 13:22:11 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 13:22:11 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 13:22:11 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 13:22:11 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. 
+2016-04-08 13:22:11 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 13:22:11 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 13:22:11 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 13:22:11 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 13:22:11 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 13:22:11 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 13:22:11 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 13:22:11 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 13:22:11 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 13:22:12 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 13:22:12 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 13:22:12 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 13:22:12 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 13:22:12 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 13:22:12 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 13:22:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 13:22:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 13:22:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 13:22:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 13:22:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 13:22:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 13:22:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 13:22:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:22:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 13:22:12 INFO JCRWorkspace:315 - Getting Workspace of 
user: giancarlo.panichi +2016-04-08 13:22:12 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 13:22:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:22:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:22:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:22:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:22:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 13:22:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 13:22:12 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 13:22:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:22:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 13:22:12 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 13:22:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:22:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:22:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:22:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:22:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 13:22:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:22:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath 
/Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 13:22:12 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 13:22:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 13:22:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 13:22:12 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:22:12 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 13:22:12 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 31 ms +2016-04-08 13:22:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 13:22:12 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:22:12 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 13:22:12 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-08 13:22:12 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-08 13:22:12 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 13:22:12 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 13:22:12 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 13:22:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 13:22:12 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 13:22:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 13:22:12 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 13:22:12 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 13:22:12 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 13:22:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 13:22:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 13:22:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 13:22:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in 
thread 35 +2016-04-08 13:22:12 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 13:22:12 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 13:22:12 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 13:22:12 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 13:22:12 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 13:22:12 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 13:22:12 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 13:22:12 INFO WorkspaceExplorerServiceImpl:142 - end time - 479 msc 0 sec +2016-04-08 13:22:12 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 13:22:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 13:22:54 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 13:23:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 13:23:49 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 13:24:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 13:24:44 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 13:25:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 13:25:39 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 13:26:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 13:26:34 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 13:27:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 13:27:29 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 13:28:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 13:28:24 DEBUG 
ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 13:29:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 13:29:19 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 13:30:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 13:30:14 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 13:31:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 13:31:09 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 13:32:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 13:32:04 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 13:32:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 13:32:59 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 13:33:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 13:33:54 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 13:34:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 13:34:49 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 13:35:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 13:35:44 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 13:36:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 13:36:39 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 13:37:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 13:37:34 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 13:38:29 DEBUG DefaultScopeProvider:38 - setting scope 
/gcube/devsec/devVRE in thread 31 +2016-04-08 13:38:29 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 13:39:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 13:39:24 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 13:40:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 13:40:19 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 13:41:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 13:41:14 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 13:42:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 13:42:09 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 13:43:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 13:43:04 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 13:43:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 13:43:59 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 13:44:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 13:44:54 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 13:45:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 13:45:49 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 13:46:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 13:46:44 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 13:47:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 13:47:39 DEBUG ASLSession:458 - Getting security token: null in thread 33 
+2016-04-08 13:48:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 13:48:34 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 13:49:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 13:49:29 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 13:50:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 13:50:24 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 13:51:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 13:51:19 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 13:52:14 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 13:52:14 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 13:52:14 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 13:52:14 WARN SessionCheckerServiceImpl:80 - Scope is null at Fri Apr 08 13:52:14 CEST 2016 +2016-04-08 13:52:14 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 13:56:59 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 13:56:59 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 13:56:59 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 15:28:49 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 15:28:49 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 15:28:49 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-08 15:28:49 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 15:28:49 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 15:28:49 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 15:28:49 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 15:28:49 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@392f6f34 +2016-04-08 15:28:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 15:28:49 INFO ASLSession:352 - Logging the entrance +2016-04-08 15:28:49 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 15:28:49 DEBUG TemplateModel:83 - 2016-04-08 15:28:49, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 15:28:49 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 15:28:49 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 15:28:55 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 15:28:55 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 15:28:55 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 15:28:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 15:28:55 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 15:28:55 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 15:28:55 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 15:28:55 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 134 ms +2016-04-08 15:28:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 15:28:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 15:28:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 15:28:55 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 15:28:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 15:28:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 15:28:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 15:28:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 15:28:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 15:28:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 15:28:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 15:28:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 15:28:55 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 15:28:55 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 15:28:56 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 15:28:56 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:28:56 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 15:28:56 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@45b65f70 +2016-04-08 15:28:56 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@4641556c +2016-04-08 15:28:56 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@3e1b101 +2016-04-08 15:28:56 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@10eb0d28 +2016-04-08 15:28:56 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 110 ms +2016-04-08 15:28:56 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 15:28:56 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 15:28:56 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 15:28:56 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 15:28:56 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 15:28:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 15:28:56 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:28:56 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 15:28:56 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 35 ms +2016-04-08 15:28:56 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 15:28:56 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:28:56 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 15:28:56 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 15:28:57 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:28:57 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 15:29:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 15:29:03 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 15:29:03 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 15:29:03 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 15:29:03 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:29:03 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 15:29:03 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 15:29:03 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 15:29:03 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 15:29:03 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 15:29:03 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 15:29:03 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 15:29:03 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 15:29:03 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 15:29:03 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 15:29:03 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 15:29:03 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:29:03 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 15:29:03 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 15:29:03 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 15:29:03 DEBUG WPS2SM:201 - Schema: null +2016-04-08 15:29:03 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 15:29:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 15:29:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 15:29:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:29:03 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 15:29:03 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 15:29:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:29:03 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 15:29:03 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 15:29:03 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 15:29:03 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 15:29:03 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 15:29:03 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 15:29:03 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 15:29:03 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 15:29:03 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 15:29:03 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 15:29:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 15:29:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 15:29:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:29:03 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 15:29:03 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 15:29:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:29:03 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 15:29:03 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 15:29:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 15:29:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 15:29:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:29:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 15:29:03 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 15:29:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:29:03 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 15:29:03 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 15:29:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 15:29:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 15:29:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:29:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 15:29:03 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 15:29:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:29:03 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 15:29:03 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 15:29:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 15:29:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 15:29:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:29:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 15:29:03 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 15:29:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 15:29:04 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 15:29:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:29:04 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 15:29:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:29:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 15:29:04 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:29:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:29:04 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:29:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:29:04 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 15:29:04 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 15:29:04 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 15:29:04 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 15:29:04 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 15:29:04 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 15:29:04 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 15:29:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 15:29:04 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 15:29:04 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:29:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 15:29:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-08 15:29:04 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 15:29:04 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:29:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:29:04 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 15:29:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:29:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:29:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:29:04 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 15:29:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:29:04 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 15:29:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 15:29:04 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 15:29:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 15:29:04 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:29:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:29:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:29:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:29:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 15:29:04 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 15:29:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 15:29:04 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:29:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:29:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:29:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:29:04 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 15:29:04 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 15:29:04 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 15:29:04 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 15:29:04 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 15:29:04 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 15:29:04 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 15:29:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:29:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:29:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:29:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:29:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:29:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:29:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:29:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:29:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:29:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:29:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:29:04 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 15:29:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 15:29:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:29:04 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 15:29:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:29:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:29:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:29:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:29:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 15:29:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:29:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 15:29:05 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 15:29:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:29:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:29:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:29:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:29:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:29:06 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 15:29:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 15:29:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 
15:29:06 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:29:06 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 15:29:06 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 20 ms +2016-04-08 15:29:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 15:29:06 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:29:06 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 15:29:06 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-08 15:29:06 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 15:29:06 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 15:29:06 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 15:29:06 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 15:29:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 15:29:06 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 15:29:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 15:29:06 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:29:06 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 15:29:06 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 15:29:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 15:29:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 15:29:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 15:29:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 15:29:06 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 15:29:06 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 15:29:06 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 15:29:06 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 15:29:06 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 15:29:06 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 15:29:06 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 15:29:06 INFO WorkspaceExplorerServiceImpl:142 - end time - 414 msc 0 sec +2016-04-08 15:29:06 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 15:29:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 15:29:44 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 15:30:28 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 15:30:28 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 15:30:28 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-08 15:30:28 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 15:30:28 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 15:30:28 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 15:30:28 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 15:30:28 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@83cb66d +2016-04-08 15:30:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 15:30:28 INFO ASLSession:352 - Logging the entrance +2016-04-08 15:30:28 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 15:30:28 DEBUG TemplateModel:83 - 2016-04-08 15:30:28, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 15:30:28 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 15:30:28 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 15:30:32 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 15:30:32 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 15:30:32 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 15:30:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 15:30:32 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 15:30:32 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 15:30:32 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 15:30:32 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 136 ms +2016-04-08 
15:30:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 15:30:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 15:30:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 15:30:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 15:30:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 15:30:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 15:30:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 15:30:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 15:30:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 15:30:32 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 15:30:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 15:30:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 15:30:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 15:30:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 15:30:32 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 15:30:32 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:30:32 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 15:30:32 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@187b3b8c +2016-04-08 15:30:32 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@1a91092f +2016-04-08 15:30:32 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@3ce82799 +2016-04-08 15:30:32 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@28642d54 +2016-04-08 15:30:33 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 107 ms +2016-04-08 15:30:33 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 15:30:33 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 15:30:33 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 15:30:33 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 15:30:33 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 15:30:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 15:30:33 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:30:33 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 15:30:33 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 20 ms +2016-04-08 15:30:33 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 15:30:33 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:30:33 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 15:30:33 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 15:30:33 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:30:33 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 15:30:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 15:30:37 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 15:30:37 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 15:30:37 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 15:30:37 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:30:37 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 15:30:37 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 15:30:37 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 15:30:37 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 15:30:37 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 15:30:37 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 15:30:37 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 15:30:37 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 15:30:37 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 15:30:37 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 15:30:37 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 15:30:37 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:30:37 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 15:30:37 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 15:30:37 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 15:30:37 DEBUG WPS2SM:201 - Schema: null +2016-04-08 15:30:37 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 15:30:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 15:30:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 15:30:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:30:37 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 15:30:37 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 15:30:37 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:30:37 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 15:30:37 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 15:30:37 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 15:30:37 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 15:30:37 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 15:30:37 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 15:30:37 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 15:30:37 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 15:30:37 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 15:30:37 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 15:30:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 15:30:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 15:30:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:30:37 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 15:30:37 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 15:30:37 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:30:37 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 15:30:37 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 15:30:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 15:30:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 15:30:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:30:37 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 15:30:37 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 15:30:37 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:30:37 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 15:30:37 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 15:30:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 15:30:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 15:30:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:30:37 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 15:30:37 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 15:30:37 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:30:37 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 15:30:37 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 15:30:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 15:30:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 15:30:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:30:37 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 15:30:37 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 15:30:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 15:30:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 15:30:37 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 15:30:37 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 15:30:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 15:30:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 15:30:37 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:30:37 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:30:37 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:30:37 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:30:37 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 15:30:37 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 15:30:37 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 15:30:37 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 15:30:37 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 15:30:38 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 15:30:38 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 15:30:38 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 15:30:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 15:30:38 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:30:38 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 15:30:38 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-08 15:30:38 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is 
null?false +2016-04-08 15:30:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 15:30:38 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 15:30:38 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:30:38 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:30:38 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 15:30:38 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:30:38 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 15:30:38 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:30:38 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:30:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 15:30:38 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 15:30:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 15:30:38 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:30:38 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:30:38 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:30:38 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:30:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:30:38 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 15:30:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:30:38 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:30:38 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:30:38 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:30:38 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:30:38 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 15:30:38 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 15:30:38 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 15:30:38 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 15:30:38 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 15:30:38 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 15:30:38 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 15:30:38 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:30:38 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:30:38 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:30:38 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:30:38 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:30:38 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:30:38 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:30:38 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:30:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:30:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:30:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:30:38 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 15:30:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 15:30:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:30:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:30:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 15:30:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:30:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:30:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 15:30:38 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 15:30:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 15:30:38 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 15:30:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:30:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:30:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:30:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:30:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:30:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:30:38 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 15:30:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 15:30:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 
15:30:39 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:30:39 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 15:30:39 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 30 ms +2016-04-08 15:30:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 15:30:39 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:30:39 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 15:30:39 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 15:30:39 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 15:30:39 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 15:30:39 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 15:30:39 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 15:30:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 15:30:39 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 15:30:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 15:30:39 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:30:39 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 15:30:39 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 15:30:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 15:30:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 15:30:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 15:30:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:30:39 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 15:30:39 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 15:30:39 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 15:30:39 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 15:30:39 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 15:30:39 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 15:30:39 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 15:30:39 INFO WorkspaceExplorerServiceImpl:142 - end time - 454 msc 0 sec +2016-04-08 15:30:39 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 15:31:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 15:31:23 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 15:32:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 15:32:18 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 15:33:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 15:33:13 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 15:34:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 15:34:08 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 15:35:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:35:03 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 15:35:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 15:35:58 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 15:36:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:36:53 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 15:37:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:37:48 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 15:38:43 DEBUG 
DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:38:43 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 15:39:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 15:39:38 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 15:40:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 15:40:33 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 15:41:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:41:28 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 15:42:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 15:42:23 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 15:43:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 15:43:18 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 15:44:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 15:44:13 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 15:45:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 15:45:08 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 15:46:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 15:46:03 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 15:46:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 15:46:58 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 15:47:39 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 15:47:39 DEBUG 
AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 15:47:39 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-08 15:47:39 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 15:47:39 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 15:47:39 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 15:47:39 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 15:47:39 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@2b4f9c5c +2016-04-08 15:47:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 15:47:39 INFO ASLSession:352 - Logging the entrance +2016-04-08 15:47:39 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 15:47:39 DEBUG TemplateModel:83 - 2016-04-08 15:47:39, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 15:47:39 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 15:47:39 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 15:47:45 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 15:47:45 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 15:47:45 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 15:47:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 15:47:45 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 15:47:45 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 15:47:45 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 15:47:45 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 123 ms +2016-04-08 15:47:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 15:47:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 15:47:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 15:47:45 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 15:47:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 15:47:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 15:47:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 15:47:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 15:47:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 15:47:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 15:47:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 15:47:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 15:47:45 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 15:47:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 15:47:45 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 15:47:45 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:47:45 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 15:47:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@4f19b58c +2016-04-08 15:47:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@5d310d7d +2016-04-08 15:47:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@f6e6249 +2016-04-08 15:47:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@51ed1947 +2016-04-08 15:47:45 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 94 ms +2016-04-08 15:47:45 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 15:47:45 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 15:47:45 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 15:47:45 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 15:47:45 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 15:47:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 15:47:45 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:47:45 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 15:47:45 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 24 ms +2016-04-08 15:47:45 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 15:47:45 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:47:45 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 15:47:45 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 15:47:46 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:47:46 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 15:47:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:47:49 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 15:47:49 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 15:47:49 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 15:47:49 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:47:49 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 15:47:50 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 15:47:50 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 15:47:50 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 15:47:50 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 15:47:50 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 15:47:50 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 15:47:50 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 15:47:50 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 15:47:50 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 15:47:50 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 15:47:50 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:47:50 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 15:47:50 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 15:47:50 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 15:47:50 DEBUG WPS2SM:201 - Schema: null +2016-04-08 15:47:50 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 15:47:50 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 15:47:50 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 15:47:50 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:47:50 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 15:47:50 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 15:47:50 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:47:50 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 15:47:50 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 15:47:50 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 15:47:50 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 15:47:50 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 15:47:50 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 15:47:50 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 15:47:50 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 15:47:50 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 15:47:50 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 15:47:50 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 15:47:50 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 15:47:50 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:47:50 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 15:47:50 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 15:47:50 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:47:50 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 15:47:50 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 15:47:50 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 15:47:50 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 15:47:50 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:47:50 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 15:47:50 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 15:47:50 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:47:50 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 15:47:50 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 15:47:50 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 15:47:50 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 15:47:50 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:47:50 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 15:47:50 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 15:47:50 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:47:50 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 15:47:50 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 15:47:50 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 15:47:50 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 15:47:50 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:47:50 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 15:47:50 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 15:47:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 15:47:50 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 15:47:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 15:47:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 15:47:50 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 15:47:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 15:47:50 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:47:50 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:47:50 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:47:50 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:47:50 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 15:47:50 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 15:47:50 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 15:47:50 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 15:47:50 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 15:47:50 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 15:47:50 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 15:47:50 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 15:47:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 15:47:50 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:47:50 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 15:47:50 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 20 ms +2016-04-08 15:47:50 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is 
null?false +2016-04-08 15:47:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 15:47:50 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:47:50 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 15:47:50 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:47:50 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:47:50 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 15:47:50 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:47:50 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:47:50 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 15:47:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 15:47:50 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 15:47:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 15:47:50 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:47:50 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:47:50 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:47:50 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:47:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:47:50 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 15:47:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:47:50 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:47:50 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:47:50 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:47:50 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:47:51 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 15:47:51 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 15:47:51 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 15:47:51 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 15:47:51 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 15:47:51 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 15:47:51 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 15:47:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:47:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:47:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:47:51 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:47:51 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:47:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:47:51 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:47:51 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:47:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:47:51 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 15:47:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:47:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:47:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 15:47:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:47:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:47:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 15:47:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 15:47:51 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 15:47:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:47:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:47:51 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 15:47:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:47:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:47:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:47:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:47:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:47:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:47:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:47:51 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 15:47:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 15:47:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 
15:47:51 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:47:51 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 15:47:51 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 22 ms +2016-04-08 15:47:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 15:47:51 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:47:51 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 15:47:51 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 22 ms +2016-04-08 15:47:51 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 15:47:51 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 15:47:51 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 15:47:51 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 15:47:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 15:47:51 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 15:47:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 15:47:51 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:47:51 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 15:47:51 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 34 ms +2016-04-08 15:47:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 15:47:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 15:47:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 15:47:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 15:47:51 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 15:47:51 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 15:47:51 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 15:47:51 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 15:47:51 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 15:47:51 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 15:47:51 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 15:47:51 INFO WorkspaceExplorerServiceImpl:142 - end time - 487 msc 0 sec +2016-04-08 15:47:51 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 15:48:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 15:48:34 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 15:49:27 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 15:49:27 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 15:49:27 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-08 15:49:27 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 15:49:27 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 15:49:27 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 15:49:27 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 15:49:27 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@3231c9e9 +2016-04-08 15:49:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 15:49:27 INFO ASLSession:352 - Logging the entrance +2016-04-08 15:49:27 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 15:49:27 DEBUG TemplateModel:83 - 2016-04-08 15:49:27, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 15:49:27 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 15:49:27 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 15:49:31 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 15:49:31 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 15:49:31 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 15:49:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 15:49:31 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 15:49:31 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 15:49:31 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 15:49:31 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 109 ms +2016-04-08 
15:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 15:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 15:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 15:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 15:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 15:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 15:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 15:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 15:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 15:49:32 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 15:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 15:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 15:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 15:49:32 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 15:49:32 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 15:49:32 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:49:32 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 15:49:32 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@30962502 +2016-04-08 15:49:32 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@ed00bdf +2016-04-08 15:49:32 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@677742b +2016-04-08 15:49:32 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@7e5ebf9b +2016-04-08 15:49:32 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 121 ms +2016-04-08 15:49:32 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 15:49:32 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 15:49:32 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 15:49:32 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 15:49:32 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 15:49:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 15:49:32 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:49:32 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 15:49:32 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 26 ms +2016-04-08 15:49:32 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 15:49:32 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:49:32 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 15:49:32 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 15:49:33 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:49:33 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 15:49:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 15:49:36 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 15:49:36 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 15:49:36 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 15:49:36 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:49:36 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 15:49:36 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 15:49:36 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 15:49:36 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 15:49:36 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 15:49:36 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 15:49:36 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 15:49:36 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 15:49:36 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 15:49:36 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 15:49:36 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 15:49:36 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:49:36 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 15:49:36 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 15:49:36 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 15:49:36 DEBUG WPS2SM:201 - Schema: null +2016-04-08 15:49:36 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 15:49:36 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 15:49:36 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 15:49:36 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:49:36 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 15:49:36 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 15:49:36 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:49:36 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 15:49:36 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 15:49:36 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 15:49:36 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 15:49:36 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 15:49:36 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 15:49:36 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 15:49:36 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 15:49:36 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 15:49:36 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 15:49:36 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 15:49:36 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 15:49:36 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:49:36 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 15:49:36 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 15:49:36 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:49:36 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 15:49:36 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 15:49:36 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 15:49:36 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 15:49:36 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:49:36 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 15:49:36 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 15:49:36 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:49:36 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 15:49:36 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 15:49:36 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 15:49:36 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 15:49:36 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:49:36 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 15:49:36 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 15:49:36 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:49:36 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 15:49:36 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 15:49:36 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 15:49:36 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 15:49:36 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:49:36 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 15:49:36 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 15:49:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 15:49:36 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 15:49:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 15:49:36 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:49:36 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:49:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 15:49:36 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 15:49:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 15:49:36 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:49:36 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:49:36 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 15:49:36 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 15:49:36 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 15:49:36 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 15:49:36 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 15:49:36 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 15:49:36 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 15:49:36 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 15:49:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 15:49:36 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:49:36 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 15:49:36 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 17 ms +2016-04-08 15:49:37 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 15:49:37 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 35 +2016-04-08 15:49:37 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:49:37 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 15:49:37 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:49:37 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:49:37 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 15:49:37 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:49:37 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:49:37 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 15:49:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:49:37 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 15:49:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:49:37 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:49:37 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:49:37 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:49:37 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:49:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 15:49:37 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 15:49:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 15:49:37 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:49:37 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:49:37 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:49:37 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:49:37 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 15:49:37 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 15:49:37 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 15:49:37 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 15:49:37 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 15:49:37 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 15:49:37 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 15:49:37 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:49:37 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:49:37 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:49:37 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:49:37 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 15:49:37 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:49:37 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:49:37 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:49:37 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:49:37 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:49:37 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:49:37 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:49:37 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 15:49:37 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:49:37 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:49:37 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 15:49:37 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:49:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 15:49:37 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 15:49:37 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:49:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:49:37 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 15:49:37 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:49:37 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:49:37 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:49:37 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:49:37 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:49:37 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:49:37 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 15:49:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 15:49:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 
15:49:37 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:49:37 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 15:49:37 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 30 ms +2016-04-08 15:49:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 15:49:37 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:49:37 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 15:49:37 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 15:49:37 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 15:49:37 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 15:49:37 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 15:49:37 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 15:49:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 15:49:37 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 15:49:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 15:49:37 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:49:37 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 15:49:37 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 15:49:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 15:49:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 15:49:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 15:49:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 15:49:37 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 15:49:37 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 15:49:37 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 15:49:37 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 15:49:37 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 15:49:37 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 15:49:37 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 15:49:37 INFO WorkspaceExplorerServiceImpl:142 - end time - 445 msc 0 sec +2016-04-08 15:49:38 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 15:50:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 15:50:22 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 15:51:49 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 15:51:49 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 15:51:49 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-08 15:51:49 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 15:51:49 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 15:51:49 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 15:51:49 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 15:51:49 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@1aeb447f +2016-04-08 15:51:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:51:49 INFO ASLSession:352 - Logging the entrance +2016-04-08 15:51:49 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 15:51:49 DEBUG TemplateModel:83 - 2016-04-08 15:51:49, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 15:51:49 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 15:51:49 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 15:51:54 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 15:51:54 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 15:51:54 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 15:51:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 15:51:54 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 15:51:54 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 15:51:54 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 15:51:54 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 127 ms +2016-04-08 
15:51:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 15:51:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 15:51:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 15:51:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 15:51:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 15:51:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 15:51:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 15:51:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 15:51:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 15:51:54 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 15:51:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 15:51:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 15:51:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 15:51:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 15:51:55 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 15:51:55 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:51:55 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 15:51:55 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@22ad90f6 +2016-04-08 15:51:55 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@7c7a3e4e +2016-04-08 15:51:55 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@7a0f1c31 +2016-04-08 15:51:55 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@6507b290 +2016-04-08 15:51:55 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 119 ms +2016-04-08 15:51:55 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 15:51:55 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 15:51:55 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 15:51:55 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 15:51:55 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 15:51:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 15:51:55 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:51:55 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 15:51:55 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 21 ms +2016-04-08 15:51:55 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 15:51:55 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:51:55 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 15:51:55 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 15:51:56 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:51:56 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 15:51:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 15:51:59 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 15:51:59 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 15:51:59 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 15:51:59 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:51:59 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 15:51:59 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 15:51:59 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 15:51:59 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 15:51:59 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 15:51:59 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 15:51:59 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 15:51:59 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 15:51:59 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 15:51:59 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 15:51:59 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 15:51:59 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:51:59 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 15:51:59 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 15:51:59 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 15:51:59 DEBUG WPS2SM:201 - Schema: null +2016-04-08 15:51:59 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 15:51:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 15:51:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 15:51:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:51:59 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 15:51:59 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 15:51:59 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:51:59 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 15:51:59 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 15:51:59 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 15:51:59 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 15:51:59 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 15:51:59 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 15:51:59 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 15:51:59 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 15:51:59 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 15:51:59 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 15:51:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 15:51:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 15:51:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:51:59 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 15:51:59 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 15:51:59 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:51:59 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 15:51:59 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 15:51:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 15:51:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 15:51:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:51:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 15:51:59 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 15:51:59 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:51:59 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 15:51:59 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 15:51:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 15:51:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 15:51:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:51:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 15:51:59 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 15:51:59 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:51:59 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 15:51:59 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 15:51:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 15:51:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 15:51:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:51:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 15:51:59 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 15:51:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 15:51:59 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 15:51:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 15:51:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:51:59 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 15:51:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:51:59 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:51:59 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:51:59 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:51:59 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:52:00 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 15:52:00 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 15:52:00 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 15:52:00 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 15:52:00 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 15:52:00 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 15:52:00 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 15:52:00 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 15:52:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 15:52:00 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:52:00 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 15:52:00 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 17 ms +2016-04-08 15:52:00 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is 
null?false +2016-04-08 15:52:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 15:52:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:52:00 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 15:52:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:52:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:52:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:52:00 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 15:52:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:52:00 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 15:52:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 15:52:00 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 15:52:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 15:52:00 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:52:00 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:52:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:52:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:52:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 15:52:00 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 15:52:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 15:52:00 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:52:00 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:52:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:52:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:52:00 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 15:52:00 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 15:52:00 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 15:52:00 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 15:52:00 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 15:52:00 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 15:52:00 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 15:52:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:52:00 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:52:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:52:00 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:52:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:52:00 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:52:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:52:00 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:52:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:52:00 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 15:52:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:52:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:52:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 15:52:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:52:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:52:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 15:52:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 15:52:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:52:00 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 15:52:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 15:52:00 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 15:52:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:52:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:52:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:52:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:52:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:52:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:52:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:52:00 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 15:52:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 15:52:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 
15:52:01 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:52:01 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 15:52:01 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 28 ms +2016-04-08 15:52:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 15:52:01 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:52:01 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 15:52:01 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 15:52:01 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 15:52:01 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 15:52:01 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 15:52:01 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 15:52:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 15:52:01 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 15:52:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 15:52:01 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:52:01 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 15:52:01 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 15:52:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 15:52:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 15:52:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 15:52:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 15:52:01 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 15:52:01 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 15:52:01 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 15:52:01 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 15:52:01 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 15:52:01 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 15:52:01 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 15:52:01 INFO WorkspaceExplorerServiceImpl:142 - end time - 491 msc 0 sec +2016-04-08 15:52:01 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 15:52:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 15:52:44 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 15:55:00 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 15:55:00 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 15:55:00 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-08 15:55:00 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 15:55:00 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 15:55:00 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 15:55:00 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 15:55:00 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@4a6c2b5d +2016-04-08 15:55:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 15:55:00 INFO ASLSession:352 - Logging the entrance +2016-04-08 15:55:00 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 15:55:00 DEBUG TemplateModel:83 - 2016-04-08 15:55:00, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 15:55:00 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 15:55:00 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 15:55:05 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 15:55:05 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 15:55:05 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 15:55:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:55:05 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 15:55:05 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 15:55:05 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 15:55:05 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 132 ms +2016-04-08 
15:55:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 15:55:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 15:55:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 15:55:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 15:55:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 15:55:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 15:55:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 15:55:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 15:55:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 15:55:05 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 15:55:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 15:55:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 15:55:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 15:55:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 15:55:05 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 15:55:05 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:55:05 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 15:55:05 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@29c5ef45 +2016-04-08 15:55:05 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@288a2e62 +2016-04-08 15:55:05 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@44f5e501 +2016-04-08 15:55:05 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@1a2dd93 +2016-04-08 15:55:05 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 105 ms +2016-04-08 15:55:05 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 15:55:05 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 15:55:05 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 15:55:05 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 15:55:05 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 15:55:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:55:05 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:55:05 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 15:55:05 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 22 ms +2016-04-08 15:55:05 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 15:55:05 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:55:05 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 15:55:05 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 15:55:06 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:55:06 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 15:55:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 15:55:09 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 15:55:09 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 15:55:09 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 15:55:09 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:55:09 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 15:55:10 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 15:55:10 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 15:55:10 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 15:55:10 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 15:55:10 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 15:55:10 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 15:55:10 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 15:55:10 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 15:55:10 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 15:55:10 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 15:55:10 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 15:55:10 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 15:55:10 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 15:55:10 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 15:55:10 DEBUG WPS2SM:201 - Schema: null +2016-04-08 15:55:10 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 15:55:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 15:55:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 15:55:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:55:10 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 15:55:10 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 15:55:10 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:55:10 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 15:55:10 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 15:55:10 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 15:55:10 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 15:55:10 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 15:55:10 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 15:55:10 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 15:55:10 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 15:55:10 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 15:55:10 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 15:55:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 15:55:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 15:55:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:55:10 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 15:55:10 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 15:55:10 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:55:10 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 15:55:10 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 15:55:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 15:55:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 15:55:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:55:10 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 15:55:10 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 15:55:10 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:55:10 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 15:55:10 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 15:55:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 15:55:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 15:55:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:55:10 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 15:55:10 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 15:55:10 DEBUG WPS2SM:93 - WPS type: +2016-04-08 15:55:10 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 15:55:10 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 15:55:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 15:55:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 15:55:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 15:55:10 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 15:55:10 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 15:55:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 15:55:10 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 15:55:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 15:55:10 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:55:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 15:55:10 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:55:10 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 15:55:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 15:55:10 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:55:10 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:55:10 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 15:55:10 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 15:55:10 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 15:55:10 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 15:55:10 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 15:55:10 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 15:55:10 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 15:55:10 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 15:55:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 15:55:10 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:55:10 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 15:55:10 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-08 15:55:10 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 15:55:10 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 31 +2016-04-08 15:55:10 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:55:10 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:55:10 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 15:55:10 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 15:55:10 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:55:10 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:55:10 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:55:10 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 15:55:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 15:55:10 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 15:55:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 15:55:10 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:55:10 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:55:10 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:55:10 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:55:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 15:55:10 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 15:55:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 15:55:10 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 15:55:10 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 15:55:10 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 15:55:10 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 15:55:11 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 15:55:11 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 15:55:11 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 15:55:11 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 15:55:11 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 15:55:11 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 15:55:11 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 15:55:11 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:55:11 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:55:11 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:55:11 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:55:11 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:55:11 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 15:55:11 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:55:11 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 15:55:11 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:55:11 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:55:11 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:55:11 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 15:55:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 15:55:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:55:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:55:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 15:55:11 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:55:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 15:55:11 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 15:55:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 15:55:11 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 15:55:11 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:55:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:55:11 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:55:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:55:11 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 15:55:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:55:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 15:55:11 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 15:55:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 15:55:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 
15:55:11 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:55:11 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 15:55:11 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 30 ms +2016-04-08 15:55:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 15:55:11 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:55:11 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 15:55:11 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 15:55:11 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 15:55:11 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 15:55:11 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 15:55:11 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 15:55:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 15:55:11 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 15:55:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 15:55:11 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 15:55:11 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 15:55:11 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 15:55:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 15:55:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 15:55:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 15:55:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 15:55:11 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 15:55:11 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 15:55:11 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 15:55:11 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 15:55:11 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 15:55:11 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 15:55:11 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 15:55:11 INFO WorkspaceExplorerServiceImpl:142 - end time - 444 msc 0 sec +2016-04-08 15:55:11 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 15:55:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 15:55:55 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 15:56:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 15:56:50 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 15:57:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 15:57:45 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 15:58:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 15:58:40 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 15:59:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 15:59:35 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 16:00:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 16:00:30 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 16:01:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 16:01:25 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 16:02:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 16:02:20 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 16:03:15 DEBUG 
DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 16:03:15 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 16:04:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 16:04:10 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 16:05:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 16:05:05 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 16:06:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 16:06:00 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 16:06:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 16:06:55 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 16:07:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 16:07:50 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 16:08:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 16:08:45 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 16:09:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 16:09:40 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 16:10:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 16:10:35 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 16:11:17 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 16:11:17 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 16:11:17 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! 
+2016-04-08 16:11:17 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 16:11:17 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 16:11:17 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 16:11:17 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 16:11:17 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@1bec30c3 +2016-04-08 16:11:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 16:11:17 INFO ASLSession:352 - Logging the entrance +2016-04-08 16:11:17 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 16:11:17 DEBUG TemplateModel:83 - 2016-04-08 16:11:17, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 16:11:17 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 16:11:17 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 16:11:22 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 16:11:22 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 16:11:22 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 16:11:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 16:11:22 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 16:11:22 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 16:11:22 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 16:11:23 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 126 ms +2016-04-08 16:11:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 16:11:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 16:11:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 16:11:23 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 16:11:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 16:11:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 16:11:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 16:11:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 16:11:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 16:11:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 16:11:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 16:11:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 16:11:23 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 16:11:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 16:11:23 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 16:11:23 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:11:23 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 16:11:23 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@11e4cf28 +2016-04-08 16:11:23 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@53689562 +2016-04-08 16:11:23 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@47889228 +2016-04-08 16:11:23 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@24fc2dce +2016-04-08 16:11:23 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 103 ms +2016-04-08 16:11:23 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 16:11:23 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 16:11:23 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 16:11:23 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 16:11:23 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 16:11:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 16:11:23 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:11:23 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 16:11:23 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 32 ms +2016-04-08 16:11:23 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 16:11:23 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 16:11:23 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 16:11:23 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 16:11:24 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 16:11:24 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 16:11:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 16:11:28 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 16:11:28 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 16:11:28 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 16:11:28 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 16:11:28 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 16:11:29 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 16:11:29 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 16:11:29 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 16:11:29 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 16:11:29 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 16:11:29 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 16:11:29 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 16:11:29 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 16:11:29 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 16:11:29 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 16:11:29 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 16:11:29 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 16:11:29 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 16:11:29 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 16:11:29 DEBUG WPS2SM:201 - Schema: null +2016-04-08 16:11:29 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 16:11:29 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 16:11:29 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 16:11:29 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 16:11:29 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 16:11:29 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 16:11:29 DEBUG WPS2SM:93 - WPS type: +2016-04-08 16:11:29 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 16:11:29 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 16:11:29 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 16:11:29 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 16:11:29 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 16:11:29 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 16:11:29 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 16:11:29 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 16:11:29 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 16:11:29 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 16:11:29 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 16:11:29 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 16:11:29 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 16:11:29 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 16:11:29 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 16:11:29 DEBUG WPS2SM:93 - WPS type: +2016-04-08 16:11:29 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 16:11:29 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 16:11:29 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 16:11:29 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 16:11:29 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 16:11:29 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 16:11:29 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 16:11:29 DEBUG WPS2SM:93 - WPS type: +2016-04-08 16:11:29 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 16:11:29 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 16:11:29 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 16:11:29 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 16:11:29 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 16:11:29 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 16:11:29 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 16:11:29 DEBUG WPS2SM:93 - WPS type: +2016-04-08 16:11:29 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 16:11:29 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 16:11:29 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 16:11:29 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 16:11:29 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 16:11:29 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 16:11:29 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 16:11:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 16:11:29 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 16:11:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 16:11:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 16:11:29 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 16:11:29 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 16:11:29 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 16:11:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 16:11:29 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 16:11:29 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 16:11:29 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 16:11:29 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 16:11:29 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 16:11:29 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 16:11:29 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 16:11:29 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 16:11:29 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 16:11:29 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 16:11:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 16:11:29 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:11:29 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 16:11:29 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 19 ms +2016-04-08 16:11:29 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 16:11:29 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 34 +2016-04-08 16:11:29 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 16:11:29 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 16:11:29 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 16:11:29 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 16:11:29 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 16:11:29 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 16:11:29 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 16:11:29 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 16:11:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 16:11:29 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 16:11:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 16:11:29 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 16:11:29 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 16:11:29 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 16:11:29 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 16:11:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 16:11:29 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 16:11:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 16:11:29 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 16:11:29 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 16:11:29 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 16:11:29 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 16:11:30 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 16:11:30 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 16:11:30 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 16:11:30 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 16:11:30 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 16:11:30 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 16:11:30 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 16:11:30 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 16:11:30 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 16:11:30 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 16:11:30 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 16:11:30 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 16:11:30 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 16:11:30 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 16:11:30 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 16:11:30 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 16:11:30 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:11:30 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:11:30 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:11:30 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 16:11:30 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:11:30 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:11:30 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 16:11:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 16:11:30 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 16:11:30 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:11:30 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:11:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 16:11:30 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 16:11:30 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:11:30 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:11:30 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:11:30 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:11:30 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:11:30 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:11:30 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 16:11:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 16:11:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 
16:11:30 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:11:30 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 16:11:30 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 21 ms +2016-04-08 16:11:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 16:11:30 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:11:30 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 16:11:30 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 16:11:30 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 16:11:30 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 16:11:30 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 16:11:30 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 16:11:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 16:11:30 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 16:11:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 16:11:30 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:11:30 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 16:11:30 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 16:11:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 16:11:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 16:11:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 16:11:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 16:11:30 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 16:11:30 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 16:11:30 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 16:11:30 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 16:11:30 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 16:11:30 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 16:11:30 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 16:11:30 INFO WorkspaceExplorerServiceImpl:142 - end time - 492 msc 0 sec +2016-04-08 16:11:30 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 16:12:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 16:12:12 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 16:13:09 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 16:13:09 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 16:13:09 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-08 16:13:10 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 16:13:10 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 16:13:10 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 16:13:10 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 16:13:10 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@26d55d12 +2016-04-08 16:13:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 16:13:10 INFO ASLSession:352 - Logging the entrance +2016-04-08 16:13:10 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 16:13:10 DEBUG TemplateModel:83 - 2016-04-08 16:13:10, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 16:13:10 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 16:13:10 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 16:13:13 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 16:13:13 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 16:13:13 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 16:13:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 16:13:13 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 16:13:13 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 16:13:13 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 16:13:13 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 118 ms +2016-04-08 
16:13:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 16:13:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 16:13:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 16:13:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 16:13:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 16:13:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 16:13:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 16:13:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 16:13:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 16:13:13 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 16:13:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 16:13:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 16:13:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 16:13:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 16:13:13 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 16:13:13 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:13:13 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 16:13:13 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@7a207ddd +2016-04-08 16:13:13 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@361578f1 +2016-04-08 16:13:13 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@4e4a6841 +2016-04-08 16:13:13 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@678fe801 +2016-04-08 16:13:13 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 120 ms +2016-04-08 16:13:14 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 16:13:14 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 16:13:14 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 16:13:14 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 16:13:14 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 16:13:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 16:13:14 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:13:14 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 16:13:14 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 28 ms +2016-04-08 16:13:14 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 16:13:14 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 16:13:14 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 16:13:14 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 16:13:14 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 16:13:14 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 16:13:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 16:13:17 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 16:13:17 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 16:13:17 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 16:13:17 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 16:13:17 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 16:13:17 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 16:13:17 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 16:13:17 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 16:13:17 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 16:13:17 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 16:13:17 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 16:13:17 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 16:13:17 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 16:13:18 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 16:13:18 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 16:13:18 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 16:13:18 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 16:13:18 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 16:13:18 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 16:13:18 DEBUG WPS2SM:201 - Schema: null +2016-04-08 16:13:18 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 16:13:18 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 16:13:18 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 16:13:18 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 16:13:18 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 16:13:18 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 16:13:18 DEBUG WPS2SM:93 - WPS type: +2016-04-08 16:13:18 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 16:13:18 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 16:13:18 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 16:13:18 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 16:13:18 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 16:13:18 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 16:13:18 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 16:13:18 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 16:13:18 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 16:13:18 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 16:13:18 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 16:13:18 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 16:13:18 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 16:13:18 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 16:13:18 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 16:13:18 DEBUG WPS2SM:93 - WPS type: +2016-04-08 16:13:18 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 16:13:18 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 16:13:18 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 16:13:18 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 16:13:18 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 16:13:18 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 16:13:18 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 16:13:18 DEBUG WPS2SM:93 - WPS type: +2016-04-08 16:13:18 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 16:13:18 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 16:13:18 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 16:13:18 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 16:13:18 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 16:13:18 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 16:13:18 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 16:13:18 DEBUG WPS2SM:93 - WPS type: +2016-04-08 16:13:18 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 16:13:18 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 16:13:18 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 16:13:18 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 16:13:18 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 16:13:18 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 16:13:18 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 16:13:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 16:13:18 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 16:13:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 16:13:18 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 16:13:18 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 16:13:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 16:13:18 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 16:13:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 16:13:18 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 16:13:18 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 16:13:18 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 16:13:18 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 16:13:18 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 16:13:18 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 16:13:18 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 16:13:18 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 16:13:18 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 16:13:18 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 16:13:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 16:13:18 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:13:18 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 16:13:18 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-08 16:13:18 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 16:13:18 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 33 +2016-04-08 16:13:18 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 16:13:18 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 16:13:18 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 16:13:18 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 16:13:18 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 16:13:18 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 16:13:18 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 16:13:18 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 16:13:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 16:13:18 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 16:13:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 16:13:18 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 16:13:18 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 16:13:18 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 16:13:18 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 16:13:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 16:13:18 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 16:13:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 16:13:18 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 16:13:18 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 16:13:18 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 16:13:18 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 16:13:18 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 16:13:18 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 16:13:18 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 16:13:18 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 16:13:18 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 16:13:18 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 16:13:18 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 16:13:18 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 16:13:18 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 16:13:18 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 16:13:18 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 16:13:18 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 16:13:18 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 16:13:18 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 16:13:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:13:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:13:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:13:18 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 16:13:18 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 16:13:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 16:13:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 16:13:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:13:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:13:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:13:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 16:13:18 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 16:13:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 16:13:18 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 16:13:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:13:19 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:13:19 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:13:19 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:13:19 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:13:19 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:13:19 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:13:19 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 16:13:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 16:13:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 
16:13:19 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:13:19 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 16:13:19 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 32 ms +2016-04-08 16:13:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 16:13:19 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:13:19 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 16:13:19 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 16:13:19 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 16:13:19 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 16:13:19 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 16:13:19 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 16:13:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 16:13:19 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 16:13:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 16:13:19 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:13:19 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 16:13:19 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-08 16:13:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 16:13:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 16:13:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 16:13:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 16:13:19 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 16:13:19 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 16:13:19 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 16:13:19 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 16:13:19 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 16:13:19 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 16:13:19 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 16:13:19 INFO WorkspaceExplorerServiceImpl:142 - end time - 483 msc 0 sec +2016-04-08 16:13:19 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 16:14:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 16:14:04 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 16:14:26 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 16:14:26 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 16:14:26 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-08 16:14:26 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 16:14:26 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 16:14:26 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 16:14:26 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 16:14:26 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@2a44b67 +2016-04-08 16:14:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 16:14:26 INFO ASLSession:352 - Logging the entrance +2016-04-08 16:14:26 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 16:14:26 DEBUG TemplateModel:83 - 2016-04-08 16:14:26, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 16:14:26 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 16:14:26 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 16:14:29 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 16:14:29 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 16:14:29 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 16:14:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 16:14:29 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 16:14:29 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 16:14:29 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 16:14:29 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 124 ms +2016-04-08 
16:14:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 16:14:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 16:14:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 16:14:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 16:14:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 16:14:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 16:14:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 16:14:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 16:14:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 16:14:30 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 16:14:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 16:14:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 16:14:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 16:14:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 16:14:30 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 16:14:30 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:14:30 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 16:14:30 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@2c19f426 +2016-04-08 16:14:30 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@3c545cd5 +2016-04-08 16:14:30 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@2aaafc7d +2016-04-08 16:14:30 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@36c53d99 +2016-04-08 16:14:30 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 121 ms +2016-04-08 16:14:30 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 16:14:30 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 16:14:30 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 16:14:30 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 16:14:30 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 16:14:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 16:14:30 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:14:30 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 16:14:30 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 48 ms +2016-04-08 16:14:30 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 16:14:30 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 16:14:30 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 16:14:30 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 16:14:31 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 16:14:31 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 16:14:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 16:14:34 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 16:14:34 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 16:14:34 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 16:14:34 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 16:14:34 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 16:14:34 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 16:14:34 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 16:14:34 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 16:14:34 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 16:14:34 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 16:14:34 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 16:14:34 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 16:14:34 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 16:14:34 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 16:14:34 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 16:14:34 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 16:14:34 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 16:14:34 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 16:14:34 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 16:14:34 DEBUG WPS2SM:201 - Schema: null +2016-04-08 16:14:34 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 16:14:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 16:14:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 16:14:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 16:14:34 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 16:14:34 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 16:14:34 DEBUG WPS2SM:93 - WPS type: +2016-04-08 16:14:34 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 16:14:34 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 16:14:34 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 16:14:34 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 16:14:34 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 16:14:34 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 16:14:34 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 16:14:34 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 16:14:34 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 16:14:34 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 16:14:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 16:14:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 16:14:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 16:14:34 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 16:14:34 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 16:14:34 DEBUG WPS2SM:93 - WPS type: +2016-04-08 16:14:34 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 16:14:34 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 16:14:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 16:14:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 16:14:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 16:14:34 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 16:14:34 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 16:14:34 DEBUG WPS2SM:93 - WPS type: +2016-04-08 16:14:34 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 16:14:34 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 16:14:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 16:14:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 16:14:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 16:14:34 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 16:14:34 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 16:14:34 DEBUG WPS2SM:93 - WPS type: +2016-04-08 16:14:34 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 16:14:34 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 16:14:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 16:14:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 16:14:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 16:14:34 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 16:14:34 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 16:14:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 16:14:35 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 16:14:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 16:14:35 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 16:14:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 16:14:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 16:14:35 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 16:14:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 16:14:35 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 16:14:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 16:14:35 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 16:14:35 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 16:14:35 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 16:14:35 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 16:14:35 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 16:14:35 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 16:14:35 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 16:14:35 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 16:14:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 16:14:35 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:14:35 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 16:14:35 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 20 ms +2016-04-08 16:14:35 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 16:14:35 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 32 +2016-04-08 16:14:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 16:14:35 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 16:14:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 16:14:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 16:14:35 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 16:14:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 16:14:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 16:14:35 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 16:14:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 16:14:35 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 16:14:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 16:14:35 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 16:14:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 16:14:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 16:14:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 16:14:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 16:14:35 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 16:14:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 16:14:35 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 16:14:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 16:14:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 16:14:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 16:14:35 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 16:14:35 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 16:14:35 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 16:14:35 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 16:14:35 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 16:14:35 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 16:14:35 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 16:14:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 16:14:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 16:14:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 16:14:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 16:14:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 16:14:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 16:14:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:14:35 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 16:14:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 16:14:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 16:14:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:14:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:14:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 16:14:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:14:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:14:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 16:14:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 16:14:35 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 16:14:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 16:14:35 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 16:14:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:14:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:14:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:14:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:14:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:14:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:14:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:14:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:14:35 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 16:14:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 16:14:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 
16:14:36 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:14:36 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 16:14:36 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 24 ms +2016-04-08 16:14:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 16:14:36 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:14:36 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 16:14:36 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 16:14:36 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 16:14:36 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 16:14:36 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 16:14:36 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 16:14:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 16:14:36 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 16:14:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 16:14:36 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:14:36 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 16:14:36 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 16:14:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 16:14:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 16:14:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 16:14:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 16:14:36 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 16:14:36 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 16:14:36 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 16:14:36 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 16:14:36 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 16:14:36 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 16:14:36 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 16:14:36 INFO WorkspaceExplorerServiceImpl:142 - end time - 413 msc 0 sec +2016-04-08 16:14:36 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 16:15:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 16:15:21 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 16:16:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 16:16:16 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 16:17:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 16:17:11 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 16:18:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 16:18:06 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 16:19:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 16:19:01 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 16:19:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 16:19:56 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 16:20:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 16:20:51 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 16:21:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 16:21:46 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 16:22:41 DEBUG 
DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 16:22:41 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 16:23:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 16:23:36 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 16:24:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 16:24:31 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 16:25:43 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 16:25:43 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 16:25:43 INFO DataMinerManagerServiceImpl:77 - DataMinerManager started! +2016-04-08 16:25:43 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 16:25:43 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 16:25:43 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 16:25:43 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 16:25:43 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@59e80259 +2016-04-08 16:25:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 16:25:43 INFO ASLSession:352 - Logging the entrance +2016-04-08 16:25:43 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 16:25:43 DEBUG TemplateModel:83 - 2016-04-08 16:25:43, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 16:25:43 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 16:25:43 DEBUG DataMinerManagerServiceImpl:96 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 16:25:47 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 16:25:47 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 16:25:47 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 16:25:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 16:25:47 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 16:25:47 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 16:25:47 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 16:25:47 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 145 ms +2016-04-08 
16:25:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 16:25:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 16:25:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 16:25:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 16:25:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 16:25:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 16:25:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 16:25:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 16:25:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 16:25:47 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 16:25:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 16:25:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 16:25:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 16:25:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 16:25:47 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 16:25:48 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:25:48 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 16:25:48 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@7c064cc +2016-04-08 16:25:48 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@44604dd5 +2016-04-08 16:25:48 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@318be31d +2016-04-08 16:25:48 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@5e76bdf +2016-04-08 16:25:48 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 99 ms +2016-04-08 16:25:48 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 16:25:48 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 16:25:48 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 16:25:48 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 16:25:48 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 16:25:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 16:25:48 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:25:48 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 16:25:48 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 28 ms +2016-04-08 16:25:48 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 16:25:48 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 16:25:48 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 16:25:48 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 16:25:49 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 16:25:49 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 16:25:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 16:25:52 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 16:25:52 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 16:25:52 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 16:25:52 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 16:25:52 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 16:25:52 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 16:25:52 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 16:25:52 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 16:25:52 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 16:25:52 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 16:25:52 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 16:25:52 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 16:25:52 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 16:25:52 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 16:25:52 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 16:25:52 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 16:25:52 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 16:25:52 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 16:25:52 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 16:25:52 DEBUG WPS2SM:201 - Schema: null +2016-04-08 16:25:52 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 16:25:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 16:25:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 16:25:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 16:25:52 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 16:25:52 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 16:25:52 DEBUG WPS2SM:93 - WPS type: +2016-04-08 16:25:52 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 16:25:52 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 16:25:52 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 16:25:52 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 16:25:52 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 16:25:52 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 16:25:52 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 16:25:52 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 16:25:52 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 16:25:52 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 16:25:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 16:25:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 16:25:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 16:25:52 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 16:25:52 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 16:25:52 DEBUG WPS2SM:93 - WPS type: +2016-04-08 16:25:52 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 16:25:52 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 16:25:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 16:25:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 16:25:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 16:25:52 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 16:25:52 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 16:25:52 DEBUG WPS2SM:93 - WPS type: +2016-04-08 16:25:52 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 16:25:52 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 16:25:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 16:25:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 16:25:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 16:25:52 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 16:25:52 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 16:25:52 DEBUG WPS2SM:93 - WPS type: +2016-04-08 16:25:52 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 16:25:52 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 16:25:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 16:25:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 16:25:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 16:25:52 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 16:25:52 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 16:25:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 16:25:52 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 16:25:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 16:25:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 16:25:52 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 16:25:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 16:25:52 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 16:25:52 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 16:25:52 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 16:25:52 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 16:25:52 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 16:25:52 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 16:25:52 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 16:25:52 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 16:25:52 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 16:25:52 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 16:25:52 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 16:25:52 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 16:25:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 16:25:53 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:25:53 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 16:25:53 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 42 ms +2016-04-08 16:25:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 16:25:53 DEBUG ASLSession:458 - Getting security token: null in thread 35 
+2016-04-08 16:25:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 16:25:53 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 16:25:53 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 16:25:53 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 16:25:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 16:25:53 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 16:25:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 16:25:53 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 16:25:53 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 16:25:53 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 16:25:53 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 16:25:53 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 16:25:53 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 16:25:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 16:25:53 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 16:25:53 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 16:25:53 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 16:25:53 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 16:25:53 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 16:25:53 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 16:25:53 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. 
+2016-04-08 16:25:53 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 16:25:53 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 16:25:53 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 16:25:53 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 16:25:53 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 16:25:53 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 16:25:53 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 16:25:53 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 16:25:53 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 16:25:53 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 16:25:53 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 16:25:53 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 16:25:53 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 16:25:53 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 16:25:53 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 16:25:53 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 16:25:53 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 16:25:53 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 16:25:53 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 16:25:53 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 16:25:53 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 16:25:53 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:25:53 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 16:25:53 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:25:53 INFO JCRWorkspace:315 - Getting Workspace of user: 
giancarlo.panichi +2016-04-08 16:25:53 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 16:25:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:25:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:25:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:25:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 16:25:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 16:25:53 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 16:25:53 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:25:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 16:25:53 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 16:25:53 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:25:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:25:53 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:25:53 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 16:25:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:25:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:25:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 16:25:53 INFO JCRServlets:142 - Calling servlet 
getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 16:25:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 16:25:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 16:25:54 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:25:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 16:25:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 34 ms +2016-04-08 16:25:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 16:25:54 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:25:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 16:25:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 
'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 16:25:54 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-08 16:25:54 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 16:25:54 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 16:25:54 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 16:25:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 16:25:54 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 16:25:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 16:25:54 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 16:25:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 16:25:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 16:25:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 16:25:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 16:25:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 16:25:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 16:25:54 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 16:25:54 DEBUG ItemBuilder:108 - Is 
shared folder: Cotrix test +2016-04-08 16:25:54 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 16:25:54 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 16:25:54 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 16:25:54 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 16:25:54 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 16:25:54 INFO WorkspaceExplorerServiceImpl:142 - end time - 440 msc 0 sec +2016-04-08 16:25:54 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 16:26:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 16:26:38 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 17:00:19 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 17:00:19 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 17:00:19 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 17:00:19 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 17:00:19 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 17:00:19 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:00:19 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 17:00:19 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@3e00a6f2 +2016-04-08 17:00:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:00:19 INFO ASLSession:352 - Logging the entrance +2016-04-08 17:00:19 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 17:00:19 DEBUG TemplateModel:83 - 2016-04-08 17:00:19, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 17:00:19 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:00:19 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 17:00:25 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 17:00:25 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 17:00:25 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 17:00:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 17:00:25 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 17:00:25 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:00:25 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 17:00:25 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 130 ms +2016-04-08 
17:00:26 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 17:00:26 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 17:00:26 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 17:00:26 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 17:00:26 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 17:00:26 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 17:00:26 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 17:00:26 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 17:00:26 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 17:00:26 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 17:00:26 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 17:00:26 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 17:00:26 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 17:00:26 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 17:00:26 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 17:00:26 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:00:26 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 17:00:26 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@2cd20886 +2016-04-08 17:00:26 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@1a21f395 +2016-04-08 17:00:26 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@21f98205 +2016-04-08 17:00:26 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@3a56a5f9 +2016-04-08 17:00:27 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 259 ms +2016-04-08 17:00:27 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 17:00:27 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 17:00:27 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 17:00:27 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 17:00:27 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 17:00:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 17:00:27 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:00:27 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 17:00:27 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 25 ms +2016-04-08 17:00:27 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 17:00:27 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:00:27 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 17:00:27 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:00:29 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:00:29 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 17:00:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:00:33 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:00:33 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:00:33 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 17:00:33 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:00:33 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:00:35 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 17:00:35 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 17:00:35 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 17:00:35 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 17:00:35 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 17:00:35 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 17:00:35 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 17:00:35 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 17:00:35 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 17:00:35 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 17:00:35 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:00:35 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 17:00:35 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 17:00:35 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 17:00:35 DEBUG WPS2SM:201 - Schema: null +2016-04-08 17:00:35 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 17:00:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 17:00:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 17:00:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:00:35 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 17:00:35 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 17:00:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:00:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:00:35 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 17:00:35 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 17:00:35 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 17:00:35 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 17:00:35 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 17:00:35 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 17:00:35 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 17:00:35 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 17:00:35 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 17:00:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 17:00:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 17:00:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:00:35 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 17:00:35 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 17:00:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:00:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:00:35 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 17:00:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 17:00:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 17:00:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:00:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 17:00:35 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 17:00:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:00:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:00:35 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 17:00:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 17:00:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 17:00:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:00:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 17:00:35 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 17:00:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:00:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:00:35 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 17:00:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 17:00:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 17:00:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:00:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 17:00:35 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 17:00:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 17:00:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 17:00:35 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 17:00:35 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 17:00:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 17:00:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 17:00:35 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 17:00:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 17:00:35 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 17:00:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 17:00:35 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 17:00:35 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 17:00:35 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 17:00:35 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 17:00:35 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 17:00:35 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 17:00:35 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 17:00:35 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 17:00:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 17:00:35 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:00:35 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 17:00:35 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-08 17:00:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 17:00:35 DEBUG ASLSession:458 - Getting security token: null in thread 33 
+2016-04-08 17:00:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 17:00:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:00:35 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:00:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:00:35 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 17:00:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 17:00:35 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 17:00:35 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 17:00:35 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 17:00:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 17:00:35 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 17:00:35 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 17:00:35 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 17:00:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 17:00:36 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 17:00:36 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 17:00:36 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 17:00:36 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. 
+2016-04-08 17:00:36 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 17:00:36 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 17:00:36 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 17:00:36 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 17:00:36 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 17:00:36 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 17:00:36 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 17:00:36 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 17:00:36 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 17:00:36 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 17:00:36 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 17:00:36 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 17:00:36 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 17:00:36 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 17:00:36 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 17:00:36 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 17:00:36 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 17:00:36 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 17:00:36 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 17:00:36 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 17:00:36 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 17:00:36 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 17:00:36 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 17:00:36 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 17:00:36 INFO JCRWorkspace:315 - 
Getting Workspace of user: giancarlo.panichi +2016-04-08 17:00:36 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 17:00:36 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 17:00:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 17:00:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 17:00:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 17:00:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 17:00:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 17:00:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 17:00:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:00:36 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:00:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 17:00:36 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 17:00:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 17:00:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 17:00:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 17:00:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 17:00:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 17:00:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 17:00:36 INFO JCRServlets:238 - Calling 
Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 17:00:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 17:00:36 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 17:00:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 17:00:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 17:00:37 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:00:37 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 17:00:37 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 23 ms +2016-04-08 17:00:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 17:00:37 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:00:37 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 17:00:37 
INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-08 17:00:37 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-08 17:00:37 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 17:00:37 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 17:00:37 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 17:00:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 17:00:37 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 17:00:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 17:00:37 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:00:37 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 17:00:37 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-08 17:00:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 17:00:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 17:00:37 DEBUG 
DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 17:00:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 17:00:37 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 17:00:37 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 17:00:37 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 17:00:37 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 17:00:37 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 17:00:37 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 17:00:37 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 17:00:37 INFO WorkspaceExplorerServiceImpl:142 - end time - 497 msc 0 sec +2016-04-08 17:00:37 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 17:01:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 17:01:14 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 17:02:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:02:09 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:08:13 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 17:08:13 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 17:08:13 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-08 17:08:13 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 17:08:13 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 17:08:13 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:08:13 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 17:08:13 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@3fc1653e +2016-04-08 17:08:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:08:13 INFO ASLSession:352 - Logging the entrance +2016-04-08 17:08:13 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 17:08:13 DEBUG TemplateModel:83 - 2016-04-08 17:08:13, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 17:08:13 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:08:13 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 17:08:17 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 17:08:17 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 17:08:17 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 17:08:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 17:08:17 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 17:08:17 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:08:17 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 17:08:17 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 120 ms +2016-04-08 17:08:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 17:08:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 17:08:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 17:08:17 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 17:08:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 17:08:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 17:08:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 17:08:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 17:08:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 17:08:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 17:08:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 17:08:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 17:08:17 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 17:08:17 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 17:08:17 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 17:08:17 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:08:17 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 17:08:17 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@757c81b2 +2016-04-08 17:08:17 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@134918cc +2016-04-08 17:08:17 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@39da7600 +2016-04-08 17:08:17 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@1ea0b98d +2016-04-08 17:08:17 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 161 ms +2016-04-08 17:08:17 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 17:08:18 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 17:08:18 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 17:08:18 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 17:08:18 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 17:08:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 17:08:18 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:08:18 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 17:08:18 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 21 ms +2016-04-08 17:08:18 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 17:08:18 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:08:18 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 17:08:18 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:08:19 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:08:19 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 17:08:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 17:08:22 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 17:08:22 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:08:22 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 17:08:22 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:08:22 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:08:22 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 17:08:22 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 17:08:22 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 17:08:22 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 17:08:22 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 17:08:22 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 17:08:22 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 17:08:22 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 17:08:22 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 17:08:22 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 17:08:22 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:08:22 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 17:08:22 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 17:08:22 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 17:08:22 DEBUG WPS2SM:201 - Schema: null +2016-04-08 17:08:22 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 17:08:22 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 17:08:22 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 17:08:22 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:08:22 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 17:08:22 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 17:08:22 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:08:22 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:08:22 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 17:08:22 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 17:08:22 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 17:08:22 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 17:08:22 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 17:08:22 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 17:08:22 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 17:08:22 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 17:08:22 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 17:08:22 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 17:08:22 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 17:08:22 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:08:22 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 17:08:22 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 17:08:22 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:08:22 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:08:22 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 17:08:22 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 17:08:22 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 17:08:22 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:08:22 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 17:08:22 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 17:08:22 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:08:22 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:08:22 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 17:08:22 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 17:08:22 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 17:08:22 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:08:22 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 17:08:22 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 17:08:22 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:08:22 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:08:22 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 17:08:22 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 17:08:22 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 17:08:22 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:08:22 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 17:08:22 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 17:08:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 17:08:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 17:08:23 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 17:08:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 17:08:23 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 17:08:23 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 17:08:23 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 17:08:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 17:08:23 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 17:08:23 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 17:08:23 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 17:08:23 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 17:08:23 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 17:08:23 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 17:08:23 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 17:08:23 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 17:08:23 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 17:08:23 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 17:08:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 17:08:23 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:08:23 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 17:08:23 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-08 17:08:23 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 17:08:23 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 31 +2016-04-08 17:08:23 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 17:08:23 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 17:08:23 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 17:08:23 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 17:08:23 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 17:08:23 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 17:08:23 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 17:08:23 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 17:08:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 17:08:23 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 17:08:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 17:08:23 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 17:08:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 17:08:23 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 17:08:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 17:08:23 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 17:08:23 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 17:08:23 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 17:08:23 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 17:08:23 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 17:08:23 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 17:08:23 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 17:08:23 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 17:08:23 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 17:08:23 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 17:08:23 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 17:08:23 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 17:08:23 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 17:08:23 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 17:08:23 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 17:08:23 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 17:08:23 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 17:08:23 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 17:08:23 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 17:08:23 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 17:08:23 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 17:08:23 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 17:08:23 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 17:08:23 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 17:08:23 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders 
+2016-04-08 17:08:23 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 17:08:23 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 17:08:23 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 17:08:23 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 17:08:23 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 17:08:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 17:08:23 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 17:08:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 17:08:23 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 17:08:23 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 17:08:23 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 17:08:23 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 17:08:23 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 17:08:23 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 17:08:23 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 17:08:23 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 17:08:23 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 17:08:24 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by 
giancarlo.panichi +2016-04-08 17:08:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 17:08:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 17:08:24 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:08:24 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 17:08:24 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 32 ms +2016-04-08 17:08:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 17:08:24 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:08:24 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 17:08:24 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) 
return $resource in 16 ms +2016-04-08 17:08:24 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-08 17:08:24 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 17:08:24 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 17:08:24 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 17:08:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 17:08:24 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 17:08:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 17:08:24 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:08:24 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 17:08:24 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 17:08:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 17:08:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 17:08:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 17:08:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 17:08:24 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 17:08:24 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 17:08:24 DEBUG 
ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 17:08:24 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 17:08:24 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 17:08:24 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 17:08:24 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 17:08:24 INFO WorkspaceExplorerServiceImpl:142 - end time - 471 msc 0 sec +2016-04-08 17:08:24 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 17:09:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:09:08 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:10:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:10:03 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:10:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 17:10:58 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 17:11:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 17:11:53 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 17:12:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 17:12:48 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 17:13:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 17:13:43 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 17:14:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 17:14:38 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 17:15:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 17:15:33 DEBUG 
ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 17:16:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 17:16:28 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 17:17:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 17:17:23 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 17:18:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 17:18:18 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 17:23:01 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 17:23:01 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 17:23:01 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 17:23:01 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 17:23:01 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 17:23:01 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:23:01 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 17:23:01 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@76250753 +2016-04-08 17:23:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:23:01 INFO ASLSession:352 - Logging the entrance +2016-04-08 17:23:01 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 17:23:01 DEBUG TemplateModel:83 - 2016-04-08 17:23:01, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 17:23:01 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:23:01 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 17:23:06 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 17:23:06 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 17:23:06 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 17:23:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 17:23:06 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 17:23:06 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:23:06 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 17:23:06 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 126 ms +2016-04-08 
17:23:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 17:23:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 17:23:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 17:23:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 17:23:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 17:23:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 17:23:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 17:23:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 17:23:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 17:23:06 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 17:23:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 17:23:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 17:23:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 17:23:06 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 17:23:06 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 17:23:06 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:23:06 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 17:23:07 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@57ce58ed +2016-04-08 17:23:07 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@175acd4b +2016-04-08 17:23:07 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@6865438a +2016-04-08 17:23:07 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@66854ef3 +2016-04-08 17:23:07 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 103 ms +2016-04-08 17:23:07 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 17:23:07 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 17:23:07 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 17:23:07 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 17:23:07 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 17:23:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 17:23:07 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:23:07 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 17:23:07 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 20 ms +2016-04-08 17:23:07 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 17:23:07 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:23:07 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 17:23:07 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:23:07 ERROR DataMinerManagerServiceImpl:115 - An error occurred getting the OperatorsClassifications list +org.gcube.portlets.user.dataminermanager.shared.exception.ServiceException: Error occured while retrieving capabilities from url: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService?Service=WPS&Request=GetCapabilities&version=1.0.0& + at org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS.connect(SClient4WPS.java:333) + at org.gcube.portlets.user.dataminermanager.server.smservice.SClient4WPS.getOperatorsClassifications(SClient4WPS.java:159) + at org.gcube.portlets.user.dataminermanager.server.DataMinerManagerServiceImpl.getOperatorsClassifications(DataMinerManagerServiceImpl.java:112) + at sun.reflect.NativeMethodAccessorImpl.invoke0(Native Method) + at sun.reflect.NativeMethodAccessorImpl.invoke(NativeMethodAccessorImpl.java:57) + at sun.reflect.DelegatingMethodAccessorImpl.invoke(DelegatingMethodAccessorImpl.java:43) + at java.lang.reflect.Method.invoke(Method.java:606) + at com.google.gwt.user.server.rpc.RPC.invokeAndEncodeResponse(RPC.java:561) + at com.google.gwt.user.server.rpc.RemoteServiceServlet.processCall(RemoteServiceServlet.java:265) + at com.google.gwt.user.server.rpc.RemoteServiceServlet.processPost(RemoteServiceServlet.java:305) + at 
com.google.gwt.user.server.rpc.AbstractRemoteServiceServlet.doPost(AbstractRemoteServiceServlet.java:62) + at javax.servlet.http.HttpServlet.service(HttpServlet.java:755) + at javax.servlet.http.HttpServlet.service(HttpServlet.java:848) + at org.eclipse.jetty.servlet.ServletHolder.handle(ServletHolder.java:686) + at org.eclipse.jetty.servlet.ServletHandler.doHandle(ServletHandler.java:501) + at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:137) + at org.eclipse.jetty.security.SecurityHandler.handle(SecurityHandler.java:557) + at org.eclipse.jetty.server.session.SessionHandler.doHandle(SessionHandler.java:231) + at org.eclipse.jetty.server.handler.ContextHandler.doHandle(ContextHandler.java:1086) + at org.eclipse.jetty.servlet.ServletHandler.doScope(ServletHandler.java:428) + at org.eclipse.jetty.server.session.SessionHandler.doScope(SessionHandler.java:193) + at org.eclipse.jetty.server.handler.ContextHandler.doScope(ContextHandler.java:1020) + at org.eclipse.jetty.server.handler.ScopedHandler.handle(ScopedHandler.java:135) + at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) + at org.eclipse.jetty.server.handler.RequestLogHandler.handle(RequestLogHandler.java:68) + at org.eclipse.jetty.server.handler.HandlerWrapper.handle(HandlerWrapper.java:116) + at org.eclipse.jetty.server.Server.handle(Server.java:370) + at org.eclipse.jetty.server.AbstractHttpConnection.handleRequest(AbstractHttpConnection.java:489) + at org.eclipse.jetty.server.AbstractHttpConnection.content(AbstractHttpConnection.java:960) + at org.eclipse.jetty.server.AbstractHttpConnection$RequestHandler.content(AbstractHttpConnection.java:1021) + at org.eclipse.jetty.http.HttpParser.parseNext(HttpParser.java:865) + at org.eclipse.jetty.http.HttpParser.parseAvailable(HttpParser.java:240) + at org.eclipse.jetty.server.AsyncHttpConnection.handle(AsyncHttpConnection.java:82) + at 
org.eclipse.jetty.io.nio.SelectChannelEndPoint.handle(SelectChannelEndPoint.java:668) + at org.eclipse.jetty.io.nio.SelectChannelEndPoint$1.run(SelectChannelEndPoint.java:52) + at org.eclipse.jetty.util.thread.QueuedThreadPool.runJob(QueuedThreadPool.java:608) + at org.eclipse.jetty.util.thread.QueuedThreadPool$3.run(QueuedThreadPool.java:543) + at java.lang.Thread.run(Thread.java:745) +2016-04-08 17:23:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:23:37 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:23:37 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:23:37 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 17:23:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 17:23:45 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 17:23:45 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:23:45 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:23:47 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:23:47 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. 
In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 17:23:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 17:23:50 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 17:23:50 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:23:50 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 17:23:50 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:23:50 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:23:52 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 17:23:52 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 17:23:52 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 17:23:52 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 17:23:52 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 17:23:52 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 17:23:52 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 17:23:52 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 17:23:52 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 17:23:52 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 17:23:52 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:23:52 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 17:23:52 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 17:23:52 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 17:23:52 DEBUG WPS2SM:201 - Schema: null +2016-04-08 17:23:52 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 17:23:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 17:23:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 17:23:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:23:52 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 17:23:52 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 17:23:52 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:23:52 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:23:52 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 17:23:52 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 17:23:52 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 17:23:52 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 17:23:52 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 17:23:52 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 17:23:52 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 17:23:52 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 17:23:52 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 17:23:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 17:23:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 17:23:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:23:52 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 17:23:52 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 17:23:52 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:23:52 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:23:52 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 17:23:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 17:23:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 17:23:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:23:52 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 17:23:52 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 17:23:52 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:23:52 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:23:52 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 17:23:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 17:23:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 17:23:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:23:52 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 17:23:52 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 17:23:52 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:23:52 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:23:52 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 17:23:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 17:23:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 17:23:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:23:52 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 17:23:52 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 17:24:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:24:10 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:24:10 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:24:10 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 17:24:10 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:24:10 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:24:10 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. 
column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 17:24:10 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 17:24:10 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 17:24:10 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 17:24:10 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 17:24:10 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + +2016-04-08 17:24:10 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 17:24:10 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 17:24:10 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 17:24:10 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 17:24:10 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:24:10 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 17:24:10 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 17:24:10 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 17:24:10 DEBUG WPS2SM:201 - Schema: null +2016-04-08 17:24:10 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 17:24:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 17:24:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 17:24:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:24:10 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-08 17:24:10 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 17:24:10 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:24:10 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:24:10 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 17:24:10 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-08 17:24:10 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 17:24:10 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 17:24:10 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 17:24:10 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 17:24:10 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 17:24:10 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 17:24:10 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 17:24:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 17:24:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 17:24:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:24:10 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 17:24:10 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 17:24:10 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:24:10 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:24:10 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 17:24:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 17:24:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 17:24:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:24:10 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 17:24:10 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 17:24:10 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:24:10 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:24:10 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 17:24:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 17:24:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 17:24:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:24:10 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 17:24:10 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 17:24:10 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:24:10 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:24:10 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 17:24:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 17:24:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 17:24:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:24:10 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 17:24:10 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 17:24:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 17:24:32 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 17:25:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:25:27 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:26:05 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 17:26:05 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 17:26:05 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 17:26:05 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 17:26:05 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 17:26:05 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:26:05 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 17:26:05 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@47ca1015 +2016-04-08 17:26:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:26:05 INFO ASLSession:352 - Logging the entrance +2016-04-08 17:26:05 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 17:26:05 DEBUG TemplateModel:83 - 2016-04-08 17:26:05, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 17:26:05 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:26:05 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 17:26:08 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 17:26:08 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 17:26:08 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 17:26:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 17:26:08 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 17:26:08 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:26:08 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 17:26:08 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 119 ms +2016-04-08 
17:26:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 17:26:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 17:26:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 17:26:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 17:26:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 17:26:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 17:26:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 17:26:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 17:26:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 17:26:08 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 17:26:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 17:26:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 17:26:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 17:26:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 17:26:08 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 17:26:08 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:26:09 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 17:26:09 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@6ef165ff +2016-04-08 17:26:09 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@34783057 +2016-04-08 17:26:09 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@66553c89 +2016-04-08 17:26:09 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@79b84215 +2016-04-08 17:26:09 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 126 ms +2016-04-08 17:26:09 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 17:26:09 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 17:26:09 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 17:26:09 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 17:26:09 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 17:26:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 17:26:09 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:26:09 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 17:26:09 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 32 ms +2016-04-08 17:26:09 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 17:26:09 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:26:09 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 17:26:09 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:26:10 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:26:10 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 17:26:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 17:26:13 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 17:26:13 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:26:13 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 17:26:13 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:26:13 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:26:13 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 17:26:13 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 17:26:13 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 17:26:13 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 17:26:13 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 17:26:13 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 17:26:13 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 17:26:13 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 17:26:13 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 17:26:13 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 17:26:13 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:26:13 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 17:26:13 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 17:26:13 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 17:26:13 DEBUG WPS2SM:201 - Schema: null +2016-04-08 17:26:13 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 17:26:13 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 17:26:13 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 17:26:13 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:26:13 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 17:26:13 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 17:26:13 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:26:13 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:26:13 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 17:26:13 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 17:26:13 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 17:26:13 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 17:26:13 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 17:26:13 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 17:26:13 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 17:26:13 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 17:26:13 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 17:26:13 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 17:26:13 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 17:26:13 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:26:13 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 17:26:13 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 17:26:13 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:26:13 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:26:13 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 17:26:13 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 17:26:13 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 17:26:13 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:26:13 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 17:26:13 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 17:26:13 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:26:13 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:26:13 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 17:26:13 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 17:26:13 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 17:26:13 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:26:13 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 17:26:13 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 17:26:13 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:26:13 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:26:13 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 17:26:13 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 17:26:13 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 17:26:13 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:26:13 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 17:26:13 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 17:27:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:27:00 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:27:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 17:27:55 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 17:30:16 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 17:30:16 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 17:30:16 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 17:30:16 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 17:30:16 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 17:30:16 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 17:30:16 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 17:30:16 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@38c7ed91 +2016-04-08 17:30:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 17:30:16 INFO ASLSession:352 - Logging the entrance +2016-04-08 17:30:16 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 17:30:16 DEBUG TemplateModel:83 - 2016-04-08 17:30:16, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 17:30:16 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:30:16 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 17:30:19 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 17:30:19 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 17:30:19 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 17:30:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 17:30:19 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 17:30:19 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:30:19 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 17:30:19 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 121 ms +2016-04-08 
17:30:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 17:30:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 17:30:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 17:30:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 17:30:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 17:30:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 17:30:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 17:30:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 17:30:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 17:30:19 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 17:30:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 17:30:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 17:30:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 17:30:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 17:30:19 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 17:30:19 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:30:19 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 17:30:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@1a2460e7 +2016-04-08 17:30:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@7be0130e +2016-04-08 17:30:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@6d73002f +2016-04-08 17:30:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@43335e4c +2016-04-08 17:30:19 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 131 ms +2016-04-08 17:30:19 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 17:30:20 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 17:30:20 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 17:30:20 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 17:30:20 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 17:30:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 17:30:20 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:30:20 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 17:30:20 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-08 17:30:20 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 17:30:20 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:30:20 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 17:30:20 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:30:21 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:30:21 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 17:30:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 17:30:24 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 17:30:24 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:30:24 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 17:30:24 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:30:24 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:30:24 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 17:30:24 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 17:30:24 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 17:30:24 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 17:30:24 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 17:30:24 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 17:30:24 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 17:30:24 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 17:30:24 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 17:30:24 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 17:30:24 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:30:24 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 17:30:24 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 17:30:24 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 17:30:24 DEBUG WPS2SM:201 - Schema: null +2016-04-08 17:30:24 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 17:30:24 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 17:30:24 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 17:30:24 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:30:24 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 17:30:24 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 17:30:24 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:30:24 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:30:24 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 17:30:24 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 17:30:24 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 17:30:24 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 17:30:24 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 17:30:24 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 17:30:24 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 17:30:24 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 17:30:24 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 17:30:24 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 17:30:24 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 17:30:24 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:30:24 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 17:30:24 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 17:30:24 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:30:24 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:30:24 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 17:30:24 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 17:30:24 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 17:30:24 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:30:24 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 17:30:24 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 17:30:24 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:30:24 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:30:24 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 17:30:24 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 17:30:24 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 17:30:24 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:30:24 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 17:30:24 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 17:30:24 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:30:24 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:30:24 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 17:30:24 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 17:30:24 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 17:30:24 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:30:24 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 17:30:24 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 17:31:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:31:11 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:32:19 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 17:32:19 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 17:32:19 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 17:32:19 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 17:32:19 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 17:32:19 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 17:32:19 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 17:32:19 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@432437ad +2016-04-08 17:32:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 17:32:19 INFO ASLSession:352 - Logging the entrance +2016-04-08 17:32:19 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 17:32:19 DEBUG TemplateModel:83 - 2016-04-08 17:32:19, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 17:32:19 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:32:19 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 17:32:22 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 17:32:22 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 17:32:22 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 17:32:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 17:32:22 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 17:32:22 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:32:22 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 17:32:22 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 119 ms +2016-04-08 
17:32:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 17:32:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 17:32:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 17:32:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 17:32:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 17:32:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 17:32:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 17:32:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 17:32:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 17:32:22 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 17:32:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 17:32:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 17:32:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 17:32:22 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 17:32:22 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 17:32:22 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:32:22 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 17:32:22 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@30b44a8a +2016-04-08 17:32:22 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@5bd7cf85 +2016-04-08 17:32:22 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@1933d187 +2016-04-08 17:32:22 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@5bcb63f0 +2016-04-08 17:32:23 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 161 ms +2016-04-08 17:32:23 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 17:32:23 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 17:32:23 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 17:32:23 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 17:32:23 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 17:32:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 17:32:23 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:32:23 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 17:32:23 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 35 ms +2016-04-08 17:32:23 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 17:32:23 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:32:23 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 17:32:23 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:32:24 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:32:24 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 17:32:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:32:26 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:32:26 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:32:26 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 17:32:26 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:32:26 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:32:27 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 17:32:27 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 17:32:27 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 17:32:27 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 17:32:27 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 17:32:27 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 17:32:27 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 17:32:27 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 17:32:27 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 17:32:27 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 17:32:27 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:32:27 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 17:32:27 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 17:32:27 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 17:32:27 DEBUG WPS2SM:201 - Schema: null +2016-04-08 17:32:27 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 17:32:27 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 17:32:27 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 17:32:27 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:32:27 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 17:32:27 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 17:32:27 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:32:27 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:32:27 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 17:32:27 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 17:32:27 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 17:32:27 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 17:32:27 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 17:32:27 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 17:32:27 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 17:32:27 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 17:32:27 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 17:32:27 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 17:32:27 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 17:32:27 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:32:27 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 17:32:27 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 17:32:27 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:32:27 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:32:27 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 17:32:27 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 17:32:27 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 17:32:27 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:32:27 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 17:32:27 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 17:32:27 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:32:27 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:32:27 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 17:32:27 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 17:32:27 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 17:32:27 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:32:27 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 17:32:27 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 17:32:27 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:32:27 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:32:27 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 17:32:27 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 17:32:27 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 17:32:27 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:32:27 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 17:32:27 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 17:33:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 17:33:14 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 17:34:25 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 17:34:25 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 17:34:25 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 17:34:25 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 17:34:25 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 17:34:25 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:34:25 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 17:34:25 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@5eb83295 +2016-04-08 17:34:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:34:25 INFO ASLSession:352 - Logging the entrance +2016-04-08 17:34:25 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 17:34:25 DEBUG TemplateModel:83 - 2016-04-08 17:34:25, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 17:34:25 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:34:25 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 17:34:28 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 17:34:28 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 17:34:28 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 17:34:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:34:28 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:34:28 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:34:28 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 17:34:29 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 122 ms +2016-04-08 
17:34:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 17:34:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 17:34:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 17:34:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 17:34:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 17:34:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 17:34:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 17:34:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 17:34:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 17:34:29 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 17:34:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 17:34:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 17:34:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 17:34:29 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 17:34:29 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 17:34:29 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:34:29 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 17:34:29 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@6fd257d4 +2016-04-08 17:34:29 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@21d5627a +2016-04-08 17:34:29 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@680dbe35 +2016-04-08 17:34:29 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@735f0217 +2016-04-08 17:34:29 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 119 ms +2016-04-08 17:34:29 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 17:34:29 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 17:34:29 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 17:34:29 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 17:34:29 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 17:34:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:34:29 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:34:29 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 17:34:29 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 27 ms +2016-04-08 17:34:29 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 17:34:29 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:34:29 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 17:34:29 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:34:30 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:34:30 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 17:34:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:34:33 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:34:33 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:34:33 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 17:34:33 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:34:33 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:34:34 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 17:34:34 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 17:34:34 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 17:34:34 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 17:34:34 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 17:34:34 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 17:34:34 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 17:34:34 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 17:34:34 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 17:34:34 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 17:34:34 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:34:34 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 17:34:34 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 17:34:34 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 17:34:34 DEBUG WPS2SM:201 - Schema: null +2016-04-08 17:34:34 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 17:34:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 17:34:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 17:34:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:34:34 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 17:34:34 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 17:34:34 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:34:34 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:34:34 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 17:34:34 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 17:34:34 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 17:34:34 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 17:34:34 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 17:34:34 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 17:34:34 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 17:34:34 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 17:34:34 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 17:34:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 17:34:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 17:34:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:34:34 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 17:34:34 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 17:34:34 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:34:34 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:34:34 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 17:34:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 17:34:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 17:34:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:34:34 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 17:34:34 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 17:34:34 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:34:34 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:34:34 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 17:34:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 17:34:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 17:34:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:34:34 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 17:34:34 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 17:34:34 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:34:34 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:34:34 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 17:34:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 17:34:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 17:34:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:34:34 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 17:34:34 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 17:35:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 17:35:20 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 17:36:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 17:36:15 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 17:36:39 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 17:36:39 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 17:36:39 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 17:36:39 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 17:36:39 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 17:36:39 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 17:36:39 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 17:36:39 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@5eb83295 +2016-04-08 17:36:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 17:36:39 INFO ASLSession:352 - Logging the entrance +2016-04-08 17:36:39 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 17:36:39 DEBUG TemplateModel:83 - 2016-04-08 17:36:39, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 17:36:39 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:36:39 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 17:36:42 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 17:36:42 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 17:36:42 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 17:36:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:36:42 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:36:42 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:36:42 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 17:36:42 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 124 ms +2016-04-08 
17:36:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 17:36:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 17:36:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 17:36:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 17:36:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 17:36:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 17:36:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 17:36:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 17:36:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 17:36:42 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 17:36:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 17:36:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 17:36:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 17:36:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 17:36:42 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 17:36:42 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:36:42 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 17:36:42 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@23f891ae +2016-04-08 17:36:42 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@26a09125 +2016-04-08 17:36:42 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@168e08b1 +2016-04-08 17:36:42 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@232e2678 +2016-04-08 17:36:42 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 122 ms +2016-04-08 17:36:43 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 17:36:43 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 17:36:43 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 17:36:43 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 17:36:43 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 17:36:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:36:43 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:36:43 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 17:36:43 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 27 ms +2016-04-08 17:36:43 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 17:36:43 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:36:43 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 17:36:43 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:36:43 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:36:43 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 17:36:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 17:36:46 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 17:36:46 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:36:46 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 17:36:46 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:36:46 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:36:47 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 17:36:47 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 17:36:47 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 17:36:47 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 17:36:47 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 17:36:47 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 17:36:47 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 17:36:47 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 17:36:47 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 17:36:47 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 17:36:47 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:36:47 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 17:36:47 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 17:36:47 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 17:36:47 DEBUG WPS2SM:201 - Schema: null +2016-04-08 17:36:47 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 17:36:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 17:36:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 17:36:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:36:47 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 17:36:47 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 17:36:47 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:36:47 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:36:47 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 17:36:47 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 17:36:47 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 17:36:47 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 17:36:47 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 17:36:47 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 17:36:47 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 17:36:47 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 17:36:47 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 17:36:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 17:36:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 17:36:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:36:47 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 17:36:47 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 17:36:47 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:36:47 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:36:47 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 17:36:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 17:36:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 17:36:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:36:47 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 17:36:47 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 17:36:47 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:36:47 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:36:47 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 17:36:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 17:36:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 17:36:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:36:47 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 17:36:47 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 17:36:47 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:36:47 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:36:47 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 17:36:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 17:36:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 17:36:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:36:47 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 17:36:47 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 17:37:31 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 17:37:31 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 17:37:31 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 17:37:31 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 17:37:31 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 17:37:31 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 17:37:31 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 17:37:31 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@5ba9400f +2016-04-08 17:37:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 17:37:31 INFO ASLSession:352 - Logging the entrance +2016-04-08 17:37:31 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 17:37:31 DEBUG TemplateModel:83 - 2016-04-08 17:37:31, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 17:37:31 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:37:31 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 17:37:36 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. 
Parsing from jar. +2016-04-08 17:37:36 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 17:37:36 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 17:37:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:37:36 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:37:36 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:37:36 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 17:37:36 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 133 ms +2016-04-08 17:37:36 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 17:37:36 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 17:37:36 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 17:37:36 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 17:37:36 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 17:37:36 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 17:37:36 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 17:37:36 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 17:37:36 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 17:37:36 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 17:37:36 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 17:37:36 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 17:37:36 INFO 
ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 17:37:36 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 17:37:36 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 17:37:36 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:37:36 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 17:37:36 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@75e4d712 +2016-04-08 17:37:36 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@388cdf4 +2016-04-08 17:37:36 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@5a04d557 +2016-04-08 17:37:36 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@5b59660d +2016-04-08 17:37:36 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 107 ms +2016-04-08 17:37:36 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 17:37:36 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 17:37:36 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 17:37:36 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 17:37:36 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 17:37:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:37:36 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:37:36 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 17:37:37 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 31 ms +2016-04-08 17:37:37 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 17:37:37 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:37:37 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 17:37:37 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:37:37 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:37:37 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 17:37:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 17:37:42 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 17:37:42 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:37:42 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 17:37:42 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:37:42 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:37:42 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 17:37:42 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 17:37:42 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 17:37:42 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 17:37:42 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 17:37:42 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 17:37:42 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 17:37:42 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 17:37:42 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 17:37:42 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 17:37:42 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:37:42 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 17:37:42 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 17:37:42 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 17:37:42 DEBUG WPS2SM:201 - Schema: null +2016-04-08 17:37:42 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 17:37:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 17:37:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 17:37:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:37:42 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 17:37:42 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 17:37:42 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:37:42 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:37:42 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 17:37:42 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 17:37:42 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 17:37:42 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 17:37:42 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 17:37:42 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 17:37:42 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 17:37:42 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 17:37:42 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 17:37:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 17:37:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 17:37:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:37:42 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 17:37:42 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 17:37:42 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:37:42 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:37:42 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 17:37:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 17:37:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 17:37:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:37:42 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 17:37:42 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 17:37:42 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:37:42 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:37:42 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 17:37:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 17:37:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 17:37:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:37:42 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 17:37:42 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 17:37:42 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:37:42 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:37:42 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 17:37:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 17:37:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 17:37:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:37:42 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 17:37:42 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 17:38:43 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 17:38:43 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 17:38:43 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 17:38:43 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 17:38:43 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 17:38:43 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:38:43 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 17:38:43 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@d90d321 +2016-04-08 17:38:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:38:43 INFO ASLSession:352 - Logging the entrance +2016-04-08 17:38:43 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 17:38:43 DEBUG TemplateModel:83 - 2016-04-08 17:38:43, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 17:38:43 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:38:43 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 17:38:47 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. 
Parsing from jar. +2016-04-08 17:38:47 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 17:38:47 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 17:38:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 17:38:47 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 17:38:47 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:38:47 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 17:38:48 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 127 ms +2016-04-08 17:38:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 17:38:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 17:38:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 17:38:48 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 17:38:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 17:38:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 17:38:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 17:38:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 17:38:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 17:38:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 17:38:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 17:38:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 17:38:48 INFO 
ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 17:38:48 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 17:38:48 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 17:38:48 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:38:48 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 17:38:48 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@6100cdfb +2016-04-08 17:38:48 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@2cecec49 +2016-04-08 17:38:48 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@2eb60ab2 +2016-04-08 17:38:48 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@473451ed +2016-04-08 17:38:48 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 132 ms +2016-04-08 17:38:48 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 17:38:48 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 17:38:48 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 17:38:48 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 17:38:48 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 17:38:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 17:38:48 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:38:48 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 17:38:48 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 31 ms +2016-04-08 17:38:48 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 17:38:48 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:38:48 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 17:38:48 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:38:49 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:38:49 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 17:38:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 17:38:51 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 17:38:51 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:38:51 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 17:38:51 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:38:51 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:38:52 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 17:38:52 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 17:38:52 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 17:38:52 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 17:38:52 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 17:38:52 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 17:38:52 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 17:38:52 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 17:38:52 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 17:38:52 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 17:38:52 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:38:52 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 17:38:52 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 17:38:52 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 17:38:52 DEBUG WPS2SM:201 - Schema: null +2016-04-08 17:38:52 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 17:38:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 17:38:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 17:38:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:38:52 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 17:38:52 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 17:38:52 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:38:52 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:38:52 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 17:38:52 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 17:38:52 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 17:38:52 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 17:38:52 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 17:38:52 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 17:38:52 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 17:38:52 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 17:38:52 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 17:38:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 17:38:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 17:38:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:38:52 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 17:38:52 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 17:38:52 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:38:52 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:38:52 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 17:38:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 17:38:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 17:38:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:38:52 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 17:38:52 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 17:38:52 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:38:52 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:38:52 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 17:38:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 17:38:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 17:38:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:38:52 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 17:38:52 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 17:38:52 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:38:52 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:38:52 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 17:38:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 17:38:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 17:38:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:38:52 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 17:38:52 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 17:39:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:39:39 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:40:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 17:40:34 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 17:41:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 17:41:29 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 17:42:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 17:42:24 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 17:43:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 17:43:19 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 17:44:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 17:44:14 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 17:45:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 17:45:09 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 17:46:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 17:46:04 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 17:46:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 17:46:59 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 17:47:54 DEBUG 
DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:47:54 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:48:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 17:48:49 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 17:49:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 17:49:44 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 17:50:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 17:50:39 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 17:51:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 17:51:34 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 17:52:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 17:52:29 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 17:53:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 17:53:24 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 17:54:20 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 17:54:20 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 17:54:20 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-08 17:54:20 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 17:54:20 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 17:54:20 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 17:54:20 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 17:54:20 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@2bbc0c9e +2016-04-08 17:54:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 17:54:20 INFO ASLSession:352 - Logging the entrance +2016-04-08 17:54:20 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 17:54:20 DEBUG TemplateModel:83 - 2016-04-08 17:54:20, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 17:54:20 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:54:20 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 17:54:24 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 17:54:24 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 17:54:24 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 17:54:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 17:54:24 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 17:54:24 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:54:24 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 17:54:24 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 118 ms +2016-04-08 17:54:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 17:54:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 17:54:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 17:54:24 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 17:54:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 17:54:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 17:54:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 17:54:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 17:54:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 17:54:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 17:54:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 17:54:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 17:54:24 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 17:54:24 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 17:54:24 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 17:54:24 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:54:24 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 17:54:24 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@3bcf35c4 +2016-04-08 17:54:24 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@50ad7a49 +2016-04-08 17:54:24 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@b4394a0 +2016-04-08 17:54:24 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@480ca664 +2016-04-08 17:54:25 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 118 ms +2016-04-08 17:54:25 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 17:54:25 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 17:54:25 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 17:54:25 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 17:54:25 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 17:54:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 17:54:25 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:54:25 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 17:54:25 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 26 ms +2016-04-08 17:54:25 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 17:54:25 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:54:25 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 17:54:25 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:54:26 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:54:26 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 17:54:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 38 +2016-04-08 17:54:29 DEBUG ASLSession:458 - Getting security token: null in thread 38 +2016-04-08 17:54:29 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:54:29 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 17:54:29 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:54:29 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:54:30 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 17:54:30 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 17:54:30 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 17:54:30 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 17:54:30 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 17:54:30 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 17:54:30 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 17:54:30 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 17:54:30 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 17:54:30 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 17:54:30 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:54:30 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 17:54:30 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 17:54:30 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 17:54:30 DEBUG WPS2SM:201 - Schema: null +2016-04-08 17:54:30 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 17:54:30 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 17:54:30 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 17:54:30 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:54:30 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 17:54:30 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 17:54:30 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:54:30 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:54:30 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 17:54:30 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 17:54:30 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 17:54:30 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 17:54:30 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 17:54:30 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 17:54:30 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 17:54:30 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 17:54:30 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 17:54:30 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 17:54:30 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 17:54:30 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:54:30 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 17:54:30 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 17:54:30 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:54:30 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:54:30 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 17:54:30 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 17:54:30 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 17:54:30 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:54:30 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 17:54:30 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 17:54:30 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:54:30 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:54:30 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 17:54:30 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 17:54:30 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 17:54:30 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:54:30 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 17:54:30 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 17:54:30 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:54:30 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:54:30 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 17:54:30 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 17:54:30 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 17:54:30 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:54:30 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 17:54:30 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 17:55:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 38 +2016-04-08 17:55:15 DEBUG ASLSession:458 - Getting security token: null in thread 38 +2016-04-08 17:56:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 17:56:10 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 17:57:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 17:57:05 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 17:58:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 38 +2016-04-08 17:58:00 DEBUG ASLSession:458 - Getting security token: null in thread 38 +2016-04-08 17:58:55 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 17:58:55 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 17:58:55 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-08 17:58:55 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 17:58:55 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 17:58:55 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 17:58:55 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 17:58:55 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@2f8ef2f2 +2016-04-08 17:58:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 17:58:55 INFO ASLSession:352 - Logging the entrance +2016-04-08 17:58:55 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 17:58:55 DEBUG TemplateModel:83 - 2016-04-08 17:58:55, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 17:58:55 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:58:55 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 17:58:59 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 17:58:59 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 17:58:59 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 17:58:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 17:58:59 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 17:58:59 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:58:59 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 17:59:00 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 128 ms +2016-04-08 17:59:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 17:59:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 17:59:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 17:59:00 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 17:59:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 17:59:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 17:59:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 17:59:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 17:59:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 17:59:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 17:59:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 17:59:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 17:59:00 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 17:59:00 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 17:59:00 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 17:59:00 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:59:00 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 17:59:00 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@2de2029f +2016-04-08 17:59:00 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@16c42f61 +2016-04-08 17:59:00 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@702b5459 +2016-04-08 17:59:00 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@3f2ad442 +2016-04-08 17:59:00 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 105 ms +2016-04-08 17:59:00 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 17:59:00 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 17:59:00 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 17:59:00 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 17:59:00 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 17:59:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 17:59:00 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 17:59:00 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 17:59:00 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 28 ms +2016-04-08 17:59:00 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 17:59:00 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:59:00 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 17:59:00 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:59:01 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:59:01 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 17:59:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 17:59:03 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 17:59:03 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 17:59:03 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 17:59:03 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:59:03 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 17:59:04 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 17:59:04 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 17:59:04 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 17:59:04 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 17:59:04 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 17:59:04 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 17:59:04 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 17:59:04 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 17:59:04 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 17:59:04 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 17:59:04 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 17:59:04 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 17:59:04 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 17:59:04 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 17:59:04 DEBUG WPS2SM:201 - Schema: null +2016-04-08 17:59:04 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 17:59:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 17:59:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 17:59:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:59:04 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 17:59:04 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 17:59:04 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:59:04 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:59:04 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 17:59:04 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 17:59:04 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 17:59:04 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 17:59:04 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 17:59:04 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 17:59:04 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 17:59:04 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 17:59:04 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 17:59:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 17:59:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 17:59:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:59:04 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 17:59:04 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 17:59:04 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:59:04 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 17:59:04 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 17:59:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 17:59:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 17:59:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:59:04 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 17:59:04 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 17:59:04 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:59:04 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:59:04 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 17:59:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 17:59:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 17:59:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:59:04 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 17:59:04 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 17:59:04 DEBUG WPS2SM:93 - WPS type: +2016-04-08 17:59:04 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 17:59:04 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 17:59:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 17:59:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 17:59:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 17:59:04 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 17:59:04 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 17:59:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 17:59:50 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 18:00:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 18:00:45 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 18:01:36 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 18:01:36 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 18:01:36 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 18:01:36 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 18:01:36 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 18:01:36 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:01:36 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 18:01:36 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@3fd537d +2016-04-08 18:01:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:01:36 INFO ASLSession:352 - Logging the entrance +2016-04-08 18:01:36 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 18:01:36 DEBUG TemplateModel:83 - 2016-04-08 18:01:36, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 18:01:36 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:01:36 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 18:01:41 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 18:01:41 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 18:01:41 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 18:01:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:01:41 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:01:41 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:01:41 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:01:41 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 123 ms +2016-04-08 
18:01:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 18:01:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 18:01:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 18:01:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 18:01:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 18:01:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 18:01:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 18:01:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 18:01:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 18:01:41 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 18:01:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 18:01:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 18:01:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 18:01:41 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 18:01:41 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 18:01:42 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:01:42 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 18:01:42 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@5b3d6e2b +2016-04-08 18:01:42 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@1b150b2d +2016-04-08 18:01:42 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@306230e6 +2016-04-08 18:01:42 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@24bd8a2f +2016-04-08 18:01:42 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 106 ms +2016-04-08 18:01:42 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 18:01:42 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 18:01:42 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 18:01:42 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 18:01:42 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 18:01:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:01:42 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:01:42 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 18:01:42 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 24 ms +2016-04-08 18:01:42 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 18:01:42 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:01:42 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 18:01:42 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:01:43 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:01:43 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 18:01:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:01:47 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:01:47 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:01:47 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 18:01:47 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:01:47 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:01:47 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 18:01:47 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 18:01:47 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 18:01:47 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 18:01:47 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 18:01:47 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 18:01:47 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 18:01:47 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 18:01:47 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 18:01:47 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 18:01:47 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:01:47 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 18:01:47 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 18:01:47 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 18:01:47 DEBUG WPS2SM:201 - Schema: null +2016-04-08 18:01:47 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 18:01:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 18:01:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 18:01:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:01:47 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 18:01:47 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 18:01:47 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:01:47 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:01:47 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 18:01:47 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 18:01:47 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 18:01:47 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 18:01:47 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 18:01:47 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 18:01:47 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 18:01:47 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 18:01:47 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 18:01:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 18:01:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 18:01:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:01:47 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 18:01:47 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 18:01:47 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:01:47 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:01:47 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 18:01:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 18:01:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 18:01:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:01:47 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 18:01:47 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 18:01:47 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:01:47 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:01:47 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 18:01:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 18:01:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 18:01:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:01:47 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 18:01:47 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 18:01:47 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:01:47 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:01:47 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 18:01:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 18:01:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 18:01:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:01:47 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 18:01:47 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 18:02:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:02:31 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:02:58 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 18:02:58 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 18:02:58 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 18:02:58 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 18:02:58 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 18:02:58 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 18:02:58 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 18:02:58 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@114557c5 +2016-04-08 18:02:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:02:58 INFO ASLSession:352 - Logging the entrance +2016-04-08 18:02:58 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 18:02:58 DEBUG TemplateModel:83 - 2016-04-08 18:02:58, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 18:02:58 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:02:58 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 18:03:02 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 18:03:02 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 18:03:02 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 18:03:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:03:02 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 18:03:02 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:03:02 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:03:02 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 146 ms +2016-04-08 
18:03:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 18:03:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 18:03:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 18:03:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 18:03:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 18:03:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 18:03:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 18:03:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 18:03:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 18:03:02 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 18:03:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 18:03:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 18:03:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 18:03:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 18:03:02 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 18:03:02 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:03:02 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 18:03:02 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@3c75f63f +2016-04-08 18:03:02 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@4933310d +2016-04-08 18:03:02 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@ce95890 +2016-04-08 18:03:02 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@6f6e192c +2016-04-08 18:03:02 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 94 ms +2016-04-08 18:03:03 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 18:03:03 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 18:03:03 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 18:03:03 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 18:03:03 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 18:03:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:03:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:03:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 18:03:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 30 ms +2016-04-08 18:03:03 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 18:03:03 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:03:03 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 18:03:03 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:03:03 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:03:03 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 18:03:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:03:06 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 18:03:06 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:03:06 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 18:03:06 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:03:06 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:03:07 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 18:03:07 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 18:03:07 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 18:03:07 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 18:03:07 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 18:03:07 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 18:03:07 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 18:03:07 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 18:03:07 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 18:03:07 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 18:03:07 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:03:07 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 18:03:07 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 18:03:07 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 18:03:07 DEBUG WPS2SM:201 - Schema: null +2016-04-08 18:03:07 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 18:03:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 18:03:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 18:03:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:03:07 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 18:03:07 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 18:03:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:03:07 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:03:07 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 18:03:07 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 18:03:07 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 18:03:07 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 18:03:07 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 18:03:07 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 18:03:07 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 18:03:07 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 18:03:07 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 18:03:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 18:03:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 18:03:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:03:07 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 18:03:07 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 18:03:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:03:07 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:03:07 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 18:03:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 18:03:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 18:03:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:03:07 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 18:03:07 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 18:03:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:03:07 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:03:07 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 18:03:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 18:03:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 18:03:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:03:07 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 18:03:07 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 18:03:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:03:07 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:03:07 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 18:03:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 18:03:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 18:03:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:03:07 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 18:03:07 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 18:04:14 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 18:04:14 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 18:04:14 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 18:04:14 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 18:04:14 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 18:04:14 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:04:14 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 18:04:14 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@69c24885 +2016-04-08 18:04:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:04:14 INFO ASLSession:352 - Logging the entrance +2016-04-08 18:04:14 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 18:04:14 DEBUG TemplateModel:83 - 2016-04-08 18:04:14, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 18:04:14 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:04:14 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 18:04:18 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. 
Parsing from jar. +2016-04-08 18:04:18 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 18:04:18 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 18:04:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:04:18 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 18:04:18 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:04:18 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:04:19 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 122 ms +2016-04-08 18:04:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 18:04:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 18:04:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 18:04:19 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 18:04:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 18:04:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 18:04:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 18:04:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 18:04:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 18:04:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 18:04:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 18:04:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 18:04:19 INFO 
ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 18:04:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 18:04:19 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 18:04:19 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:04:19 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:04:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@3a0fec3b +2016-04-08 18:04:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@1c0da36b +2016-04-08 18:04:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@2e6c7387 +2016-04-08 18:04:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@4168755d +2016-04-08 18:04:19 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 120 ms +2016-04-08 18:04:19 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 18:04:19 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 18:04:19 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 18:04:19 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 18:04:19 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 18:04:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:04:19 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:04:19 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 18:04:19 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 30 ms +2016-04-08 18:04:19 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 18:04:19 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:04:19 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 18:04:19 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:04:20 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:04:20 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 18:04:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:04:22 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:04:22 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:04:22 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 18:04:22 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:04:22 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:04:23 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 18:04:23 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 18:04:23 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 18:04:23 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 18:04:23 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 18:04:23 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 18:04:23 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 18:04:23 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 18:04:23 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 18:04:23 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 18:04:23 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:04:23 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 18:04:23 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 18:04:23 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 18:04:23 DEBUG WPS2SM:201 - Schema: null +2016-04-08 18:04:23 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 18:04:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 18:04:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 18:04:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:04:23 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 18:04:23 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 18:04:23 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:04:23 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:04:23 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 18:04:23 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 18:04:23 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 18:04:23 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 18:04:23 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 18:04:23 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 18:04:23 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 18:04:23 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 18:04:23 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 18:04:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 18:04:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 18:04:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:04:23 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 18:04:23 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 18:04:23 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:04:23 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:04:23 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 18:04:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 18:04:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 18:04:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:04:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 18:04:23 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 18:04:23 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:04:23 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:04:23 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 18:04:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 18:04:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 18:04:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:04:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 18:04:23 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 18:04:23 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:04:23 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:04:23 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 18:04:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 18:04:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 18:04:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:04:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 18:04:23 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 18:05:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:05:09 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:06:05 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 18:06:05 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 18:06:05 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 18:06:06 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 18:06:06 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 18:06:06 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:06:06 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 18:06:06 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@67d013a4 +2016-04-08 18:06:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:06:06 INFO ASLSession:352 - Logging the entrance +2016-04-08 18:06:06 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 18:06:06 DEBUG TemplateModel:83 - 2016-04-08 18:06:06, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 18:06:06 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:06:06 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 18:06:09 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 18:06:09 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 18:06:09 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 18:06:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:06:09 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:06:09 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:06:09 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:06:09 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 125 ms +2016-04-08 
18:06:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 18:06:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 18:06:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 18:06:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 18:06:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 18:06:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 18:06:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 18:06:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 18:06:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 18:06:09 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 18:06:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 18:06:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 18:06:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 18:06:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 18:06:09 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 18:06:09 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:06:09 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 18:06:09 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@7e266 +2016-04-08 18:06:09 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@2de2029f +2016-04-08 18:06:09 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@16c42f61 +2016-04-08 18:06:09 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@3bc6f722 +2016-04-08 18:06:09 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 117 ms +2016-04-08 18:06:09 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 18:06:09 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 18:06:09 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 18:06:09 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 18:06:09 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 18:06:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:06:09 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:06:09 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 18:06:09 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 28 ms +2016-04-08 18:06:09 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 18:06:09 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:06:09 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 18:06:09 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:06:10 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:06:10 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 18:06:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:06:15 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:06:15 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:06:15 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 18:06:15 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:06:15 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:06:15 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 18:06:15 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 18:06:15 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 18:06:15 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 18:06:15 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 18:06:15 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 18:06:15 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 18:06:15 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 18:06:15 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 18:06:15 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 18:06:15 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:06:15 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 18:06:15 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 18:06:15 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 18:06:15 DEBUG WPS2SM:201 - Schema: null +2016-04-08 18:06:15 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 18:06:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 18:06:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 18:06:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:06:15 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 18:06:15 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 18:06:15 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:06:15 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:06:15 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 18:06:15 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 18:06:15 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 18:06:15 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 18:06:15 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 18:06:15 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 18:06:15 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 18:06:15 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 18:06:15 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 18:06:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 18:06:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 18:06:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:06:15 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 18:06:15 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 18:06:15 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:06:15 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:06:15 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 18:06:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 18:06:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 18:06:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:06:15 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 18:06:15 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 18:06:15 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:06:15 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:06:15 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 18:06:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 18:06:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 18:06:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:06:15 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 18:06:15 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 18:06:15 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:06:15 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:06:15 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 18:06:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 18:06:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 18:06:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:06:15 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 18:06:15 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 18:06:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:06:23 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 18:06:23 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:06:23 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-08 18:06:23 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:06:23 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:06:23 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 18:06:23 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 18:06:23 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 18:06:23 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 18:06:23 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 18:06:23 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-08 18:06:23 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-08 18:06:23 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-08 18:06:23 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-08 18:06:23 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 18:06:23 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 18:06:23 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 18:06:23 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:06:23 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 18:06:23 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 18:06:23 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 18:06:23 DEBUG WPS2SM:201 - Schema: null +2016-04-08 18:06:23 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 18:06:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 18:06:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 18:06:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:06:23 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 18:06:23 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 18:06:23 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:06:23 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:06:23 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 18:06:23 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 18:06:23 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 18:06:23 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 18:06:23 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 18:06:23 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 18:06:23 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 18:06:23 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 18:06:23 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 18:06:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 18:06:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 18:06:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:06:23 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 18:06:23 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 18:06:23 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:06:23 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:06:23 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 18:06:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 18:06:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 18:06:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:06:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 18:06:23 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-08 18:06:23 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:06:23 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:06:23 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-08 18:06:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-08 18:06:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-08 18:06:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:06:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-08 18:06:23 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-08 18:06:23 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:06:23 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:06:23 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 18:06:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-08 18:06:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-08 18:06:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:06:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 18:06:23 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-08 18:06:23 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:06:23 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:06:23 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-08 18:06:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-08 18:06:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-08 18:06:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:06:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-08 18:06:23 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 18:06:23 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:06:23 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:06:23 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 18:06:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-08 18:06:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 18:06:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:06:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 18:06:23 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 18:06:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:06:30 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 18:06:30 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:06:30 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 18:06:30 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:06:30 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:06:30 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 18:06:30 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 18:06:30 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 18:06:30 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 18:06:30 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 18:06:30 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + +2016-04-08 18:06:30 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 18:06:30 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 18:06:30 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 18:06:30 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 18:06:30 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:06:30 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 18:06:30 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 18:06:30 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 18:06:30 DEBUG WPS2SM:201 - Schema: null +2016-04-08 18:06:30 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 18:06:30 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 18:06:30 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 18:06:30 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:06:30 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-08 18:06:30 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 18:06:30 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:06:30 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:06:30 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 18:06:30 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-08 18:06:30 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 18:06:30 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 18:06:30 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 18:06:30 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 18:06:30 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 18:06:30 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 18:06:30 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 18:06:30 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 18:06:30 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 18:06:30 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:06:30 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 18:06:30 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 18:06:30 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:06:30 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:06:30 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 18:06:30 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 18:06:30 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 18:06:30 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:06:30 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 18:06:30 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 18:06:30 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:06:30 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:06:30 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 18:06:30 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 18:06:30 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 18:06:30 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:06:30 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 18:06:30 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 18:06:30 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:06:30 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:06:30 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 18:06:30 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 18:06:30 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 18:06:30 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:06:30 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 18:06:30 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 18:07:19 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 18:07:19 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 18:07:19 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 18:07:19 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 18:07:19 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 18:07:19 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:07:19 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 18:07:19 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@73986812 +2016-04-08 18:07:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:07:19 INFO ASLSession:352 - Logging the entrance +2016-04-08 18:07:19 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 18:07:19 DEBUG TemplateModel:83 - 2016-04-08 18:07:19, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 18:07:19 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:07:19 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 18:07:23 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. 
Parsing from jar. +2016-04-08 18:07:23 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 18:07:23 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 18:07:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:07:23 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:07:23 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:07:23 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:07:23 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 123 ms +2016-04-08 18:07:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 18:07:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 18:07:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 18:07:23 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 18:07:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 18:07:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 18:07:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 18:07:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 18:07:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 18:07:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 18:07:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 18:07:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 18:07:23 INFO 
ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 18:07:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 18:07:24 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 18:07:24 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:07:24 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:07:24 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@726e6af6 +2016-04-08 18:07:24 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@6f6f4cc0 +2016-04-08 18:07:24 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@422273cd +2016-04-08 18:07:24 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@7d6ae78b +2016-04-08 18:07:24 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 115 ms +2016-04-08 18:07:24 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 18:07:24 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 18:07:24 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 18:07:24 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 18:07:24 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 18:07:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:07:24 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:07:24 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 18:07:24 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-08 18:07:24 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 18:07:24 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:07:24 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 18:07:24 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:07:25 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:07:25 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 18:07:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:07:29 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:07:29 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:07:29 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 18:07:29 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:07:29 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:07:29 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 18:07:29 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 18:07:29 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 18:07:29 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 18:07:29 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 18:07:29 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 18:07:29 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 18:07:29 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 18:07:29 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 18:07:29 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 18:07:29 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:07:29 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 18:07:29 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 18:07:29 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 18:07:29 DEBUG WPS2SM:201 - Schema: null +2016-04-08 18:07:29 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 18:07:29 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 18:07:29 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 18:07:29 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:07:29 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 18:07:29 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 18:07:29 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:07:29 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:07:29 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 18:07:29 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 18:07:29 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 18:07:29 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 18:07:29 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 18:07:29 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 18:07:29 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 18:07:29 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 18:07:29 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 18:07:29 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 18:07:29 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 18:07:29 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:07:29 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 18:07:29 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 18:07:29 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:07:29 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:07:29 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 18:07:29 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 18:07:29 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 18:07:29 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:07:29 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 18:07:29 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 18:07:29 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:07:29 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:07:29 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 18:07:29 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 18:07:29 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 18:07:29 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:07:29 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 18:07:29 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 18:07:29 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:07:29 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:07:29 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 18:07:29 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 18:07:29 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 18:07:29 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:07:29 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 18:07:29 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 18:08:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:08:14 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:09:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:09:09 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 18:10:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:10:04 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 18:10:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:10:59 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 18:11:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:11:54 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:12:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:12:49 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 18:13:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:13:44 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:14:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:14:39 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:15:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:15:34 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 18:16:29 DEBUG 
DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:16:29 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:17:33 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 18:17:33 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 18:17:33 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 18:17:33 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 18:17:33 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 18:17:33 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:17:33 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 18:17:33 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@44d3110 +2016-04-08 18:17:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:17:33 INFO ASLSession:352 - Logging the entrance +2016-04-08 18:17:33 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 18:17:33 DEBUG TemplateModel:83 - 2016-04-08 18:17:33, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 18:17:33 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:17:33 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 18:17:36 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 18:17:36 DEBUG ASLSession:141 - Could not parse file properties.xml for property. 
Setting it to default. +2016-04-08 18:17:36 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 18:17:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:17:36 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:17:36 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:17:36 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:17:36 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 118 ms +2016-04-08 18:17:36 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 18:17:36 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 18:17:36 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 18:17:36 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 18:17:36 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 18:17:36 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 18:17:37 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 18:17:37 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 18:17:37 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 18:17:37 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 18:17:37 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 18:17:37 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 18:17:37 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 18:17:37 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 18:17:37 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 18:17:37 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:17:37 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:17:37 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@185ca78e +2016-04-08 18:17:37 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@3639f904 +2016-04-08 18:17:37 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@f8134b8 +2016-04-08 18:17:37 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@b0db86c +2016-04-08 18:17:37 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 111 ms +2016-04-08 18:17:37 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 18:17:37 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 18:17:37 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 18:17:37 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 18:17:37 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 18:17:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:17:37 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:17:37 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 18:17:37 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 20 ms +2016-04-08 18:17:37 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 18:17:37 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:17:37 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 18:17:37 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:17:38 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:17:38 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 18:17:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 18:17:40 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 18:17:40 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:17:40 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 18:17:40 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:17:40 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:17:41 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 18:17:41 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 18:17:41 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 18:17:41 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 18:17:41 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 18:17:41 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 18:17:41 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 18:17:41 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 18:17:41 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 18:17:41 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 18:17:41 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:17:41 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 18:17:41 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 18:17:41 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 18:17:41 DEBUG WPS2SM:201 - Schema: null +2016-04-08 18:17:41 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 18:17:41 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 18:17:41 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 18:17:41 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:17:41 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 18:17:41 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 18:17:41 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:17:41 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:17:41 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 18:17:41 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 18:17:41 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 18:17:41 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 18:17:41 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 18:17:41 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 18:17:41 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 18:17:41 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 18:17:41 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 18:17:41 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 18:17:41 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 18:17:41 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:17:41 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 18:17:41 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 18:17:41 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:17:41 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:17:41 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 18:17:41 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 18:17:41 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 18:17:41 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:17:41 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 18:17:41 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 18:17:41 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:17:41 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:17:41 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 18:17:41 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 18:17:41 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 18:17:41 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:17:41 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 18:17:41 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 18:17:41 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:17:41 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:17:41 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 18:17:41 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 18:17:41 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 18:17:41 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:17:41 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 18:17:41 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 18:18:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:18:28 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:19:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:19:23 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:20:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:20:18 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:21:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:21:13 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:21:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:21:33 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:21:33 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:21:33 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 18:21:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:21:36 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:21:36 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:21:36 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:21:37 INFO StatWPSClientSession:133 - service removed successfully: 
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:21:37 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. 
an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. 
The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 18:21:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:21:40 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:21:40 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:21:40 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 18:21:40 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:21:40 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:21:40 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 18:21:40 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 18:21:40 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 18:21:40 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 18:21:40 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 18:21:40 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 18:21:40 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 18:21:40 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 18:21:40 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 18:21:40 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 18:21:40 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:21:40 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 18:21:40 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 18:21:40 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 18:21:40 DEBUG WPS2SM:201 - Schema: null +2016-04-08 18:21:40 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 18:21:40 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 18:21:40 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 18:21:40 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:21:40 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 18:21:40 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 18:21:40 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:21:40 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:21:40 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 18:21:40 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 18:21:40 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 18:21:40 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 18:21:40 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 18:21:40 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 18:21:40 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 18:21:40 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 18:21:40 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 18:21:40 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 18:21:40 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 18:21:40 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:21:40 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 18:21:40 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 18:21:40 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:21:40 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:21:40 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 18:21:40 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 18:21:40 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 18:21:40 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:21:40 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 18:21:40 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 18:21:40 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:21:40 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:21:40 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 18:21:40 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 18:21:40 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 18:21:40 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:21:40 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 18:21:40 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 18:21:40 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:21:40 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:21:40 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 18:21:40 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 18:21:40 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 18:21:40 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:21:40 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 18:21:40 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 18:22:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:22:28 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:23:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 18:23:23 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 18:24:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:24:18 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:25:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:25:13 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 18:26:11 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 18:26:11 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 18:26:11 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-08 18:26:11 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 18:26:11 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 18:26:11 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:26:11 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 18:26:11 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@6b8ebe53 +2016-04-08 18:26:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:26:11 INFO ASLSession:352 - Logging the entrance +2016-04-08 18:26:11 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 18:26:11 DEBUG TemplateModel:83 - 2016-04-08 18:26:11, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 18:26:11 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:26:11 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 18:26:15 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 18:26:15 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 18:26:15 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 18:26:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:26:15 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 18:26:15 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:26:15 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:26:15 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 128 ms +2016-04-08 18:26:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 18:26:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 18:26:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 18:26:15 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 18:26:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 18:26:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 18:26:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 18:26:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 18:26:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 18:26:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 18:26:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 18:26:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 18:26:15 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 18:26:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 18:26:15 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 18:26:15 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:26:15 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:26:15 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@7702ab8e +2016-04-08 18:26:15 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@594a3452 +2016-04-08 18:26:15 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@7a206b61 +2016-04-08 18:26:15 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@23003139 +2016-04-08 18:26:15 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 124 ms +2016-04-08 18:26:15 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 18:26:15 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 18:26:15 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 18:26:15 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 18:26:15 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 18:26:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:26:15 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:26:15 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 18:26:15 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-08 18:26:15 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 18:26:15 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:26:15 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 18:26:15 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:26:16 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:26:16 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 18:26:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:26:20 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:26:20 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:26:20 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 18:26:20 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:26:20 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:26:21 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 18:26:21 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 18:26:21 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 18:26:21 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 18:26:21 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 18:26:21 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 18:26:21 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 18:26:21 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 18:26:21 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 18:26:21 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 18:26:21 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:26:21 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 18:26:21 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 18:26:21 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 18:26:21 DEBUG WPS2SM:201 - Schema: null +2016-04-08 18:26:21 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 18:26:21 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 18:26:21 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 18:26:21 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:26:21 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 18:26:21 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 18:26:21 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:26:21 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:26:21 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 18:26:21 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 18:26:21 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 18:26:21 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 18:26:21 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 18:26:21 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 18:26:21 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 18:26:21 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 18:26:21 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 18:26:21 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 18:26:21 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 18:26:21 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:26:21 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 18:26:21 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 18:26:21 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:26:21 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:26:21 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 18:26:21 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 18:26:21 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 18:26:21 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:26:21 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 18:26:21 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 18:26:21 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:26:21 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:26:21 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 18:26:21 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 18:26:21 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 18:26:21 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:26:21 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 18:26:21 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 18:26:21 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:26:21 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:26:21 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 18:26:21 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 18:26:21 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 18:26:21 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:26:21 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 18:26:21 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 18:27:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 18:27:06 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 18:27:45 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 18:27:45 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 18:27:45 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 18:27:45 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 18:27:45 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 18:27:45 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:27:45 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 18:27:45 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@1c212f75 +2016-04-08 18:27:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:27:45 INFO ASLSession:352 - Logging the entrance +2016-04-08 18:27:45 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 18:27:45 DEBUG TemplateModel:83 - 2016-04-08 18:27:45, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 18:27:45 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:27:45 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 18:27:48 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 18:27:48 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 18:27:48 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 18:27:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:27:48 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:27:48 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:27:48 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:27:49 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 120 ms +2016-04-08 
18:27:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 18:27:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 18:27:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 18:27:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 18:27:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 18:27:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 18:27:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 18:27:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 18:27:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 18:27:49 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 18:27:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 18:27:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 18:27:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 18:27:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 18:27:49 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 18:27:49 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:27:49 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 18:27:49 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@6b195895 +2016-04-08 18:27:49 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@290c53f +2016-04-08 18:27:49 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@306230e6 +2016-04-08 18:27:49 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@24bd8a2f +2016-04-08 18:27:49 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 119 ms +2016-04-08 18:27:49 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 18:27:49 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 18:27:49 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 18:27:49 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 18:27:49 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 18:27:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:27:49 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:27:49 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 18:27:49 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 34 ms +2016-04-08 18:27:49 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 18:27:49 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:27:49 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 18:27:49 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:27:50 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:27:50 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 18:27:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:27:53 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:27:53 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:27:53 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 18:27:53 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:27:53 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:27:54 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 18:27:54 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 18:27:54 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 18:27:54 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 18:27:54 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 18:27:54 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 18:27:54 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 18:27:54 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 18:27:54 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 18:27:54 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 18:27:54 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:27:54 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 18:27:54 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 18:27:54 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 18:27:54 DEBUG WPS2SM:201 - Schema: null +2016-04-08 18:27:54 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 18:27:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 18:27:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 18:27:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:27:54 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 18:27:54 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 18:27:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:27:54 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:27:54 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 18:27:54 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 18:27:54 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 18:27:54 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 18:27:54 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 18:27:54 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 18:27:54 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 18:27:54 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 18:27:54 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 18:27:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 18:27:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 18:27:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:27:54 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 18:27:54 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 18:27:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:27:54 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:27:54 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 18:27:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 18:27:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 18:27:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:27:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 18:27:54 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 18:27:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:27:54 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:27:54 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 18:27:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 18:27:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 18:27:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:27:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 18:27:54 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 18:27:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:27:54 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:27:54 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 18:27:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 18:27:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 18:27:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:27:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 18:27:54 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 18:28:45 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 18:28:45 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 18:28:45 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 18:28:45 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 18:28:45 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 18:28:45 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:28:45 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 18:28:45 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@516b298e +2016-04-08 18:28:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:28:45 INFO ASLSession:352 - Logging the entrance +2016-04-08 18:28:45 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 18:28:45 DEBUG TemplateModel:83 - 2016-04-08 18:28:45, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 18:28:45 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:28:45 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 18:28:49 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. 
Parsing from jar. +2016-04-08 18:28:49 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 18:28:49 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 18:28:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:28:49 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:28:49 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:28:49 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:28:49 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 137 ms +2016-04-08 18:28:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 18:28:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 18:28:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 18:28:49 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 18:28:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 18:28:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 18:28:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 18:28:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 18:28:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 18:28:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 18:28:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 18:28:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 18:28:49 INFO 
ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 18:28:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 18:28:49 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 18:28:49 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:28:50 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:28:50 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@1581a399 +2016-04-08 18:28:50 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@7faaadef +2016-04-08 18:28:50 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@e7e5191 +2016-04-08 18:28:50 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@4a56cd46 +2016-04-08 18:28:50 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 112 ms +2016-04-08 18:28:50 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 18:28:50 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 18:28:50 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 18:28:50 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 18:28:50 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 18:28:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:28:50 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:28:50 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 18:28:50 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 37 ms +2016-04-08 18:28:50 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 18:28:50 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:28:50 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 18:28:50 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:28:51 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:28:51 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 18:28:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:28:53 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 18:28:53 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:28:53 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 18:28:53 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:28:53 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:28:54 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 18:28:54 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 18:28:54 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 18:28:54 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 18:28:54 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 18:28:54 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 18:28:54 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 18:28:54 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 18:28:54 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 18:28:54 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 18:28:54 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:28:54 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 18:28:54 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 18:28:54 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 18:28:54 DEBUG WPS2SM:201 - Schema: null +2016-04-08 18:28:54 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 18:28:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 18:28:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 18:28:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:28:54 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 18:28:54 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 18:28:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:28:54 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:28:54 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 18:28:54 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 18:28:54 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 18:28:54 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 18:28:54 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 18:28:54 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 18:28:54 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 18:28:54 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 18:28:54 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 18:28:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 18:28:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 18:28:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:28:54 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 18:28:54 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 18:28:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:28:54 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:28:54 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 18:28:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 18:28:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 18:28:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:28:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 18:28:54 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 18:28:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:28:54 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:28:54 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 18:28:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 18:28:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 18:28:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:28:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 18:28:54 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 18:28:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:28:54 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:28:54 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 18:28:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 18:28:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 18:28:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:28:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 18:28:54 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 18:29:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:29:40 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 18:30:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:30:35 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:31:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:31:30 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:32:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:32:25 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:32:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:32:29 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:32:29 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:32:29 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 18:32:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:32:33 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 18:32:33 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:32:33 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:32:33 INFO StatWPSClientSession:133 - service removed successfully: 
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:32:33 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. 
an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. 
The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 18:32:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:32:37 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 18:32:37 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:32:37 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 18:32:37 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:32:37 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:32:37 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 18:32:37 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 18:32:37 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 18:32:37 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 18:32:37 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 18:32:37 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 18:32:37 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 18:32:37 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 18:32:37 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 18:32:37 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 18:32:37 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:32:37 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 18:32:37 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 18:32:37 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 18:32:37 DEBUG WPS2SM:201 - Schema: null +2016-04-08 18:32:37 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 18:32:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 18:32:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 18:32:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:32:37 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 18:32:37 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 18:32:37 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:32:37 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:32:37 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 18:32:37 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 18:32:37 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 18:32:37 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 18:32:37 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 18:32:37 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 18:32:37 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 18:32:37 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 18:32:37 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 18:32:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 18:32:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 18:32:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:32:37 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 18:32:37 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 18:32:37 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:32:37 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:32:37 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 18:32:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 18:32:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 18:32:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:32:37 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 18:32:37 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 18:32:37 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:32:37 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:32:37 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 18:32:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 18:32:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 18:32:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:32:37 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 18:32:37 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 18:32:37 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:32:37 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:32:37 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 18:32:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 18:32:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 18:32:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:32:37 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 18:32:37 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 18:33:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:33:24 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 18:34:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:34:19 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:35:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:35:14 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:36:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:36:09 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:36:56 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 18:36:56 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 18:36:56 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-08 18:36:56 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 18:36:56 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 18:36:56 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:36:56 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 18:36:56 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@4e60e880 +2016-04-08 18:36:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:36:56 INFO ASLSession:352 - Logging the entrance +2016-04-08 18:36:56 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 18:36:56 DEBUG TemplateModel:83 - 2016-04-08 18:36:56, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 18:36:56 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:36:56 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 18:37:00 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 18:37:00 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 18:37:00 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 18:37:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:37:00 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:37:00 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:37:01 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:37:01 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 128 ms +2016-04-08 18:37:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 18:37:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 18:37:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 18:37:01 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 18:37:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 18:37:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 18:37:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 18:37:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 18:37:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 18:37:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 18:37:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 18:37:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 18:37:01 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 18:37:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 18:37:01 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 18:37:01 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:37:01 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:37:01 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@273d3b2c +2016-04-08 18:37:01 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@4334b104 +2016-04-08 18:37:01 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@44818350 +2016-04-08 18:37:01 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@5326a678 +2016-04-08 18:37:01 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 121 ms +2016-04-08 18:37:01 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 18:37:01 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 18:37:01 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 18:37:01 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 18:37:01 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 18:37:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:37:01 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:37:01 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 18:37:01 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-08 18:37:01 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 18:37:01 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:37:01 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 18:37:01 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:37:02 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:37:02 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 18:37:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:37:05 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 18:37:05 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:37:05 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 18:37:05 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:37:05 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:37:06 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 18:37:06 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 18:37:06 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 18:37:06 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 18:37:06 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 18:37:06 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 18:37:06 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 18:37:06 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 18:37:06 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 18:37:06 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 18:37:06 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:37:06 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 18:37:06 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 18:37:06 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 18:37:06 DEBUG WPS2SM:201 - Schema: null +2016-04-08 18:37:06 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 18:37:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 18:37:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 18:37:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:37:06 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 18:37:06 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 18:37:06 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:37:06 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:37:06 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 18:37:06 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 18:37:06 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 18:37:06 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 18:37:06 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 18:37:06 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 18:37:06 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 18:37:06 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 18:37:06 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 18:37:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 18:37:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 18:37:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:37:06 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 18:37:06 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 18:37:06 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:37:06 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:37:06 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 18:37:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 18:37:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 18:37:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:37:06 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 18:37:06 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 18:37:06 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:37:06 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:37:06 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 18:37:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 18:37:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 18:37:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:37:06 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 18:37:06 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 18:37:06 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:37:06 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:37:06 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 18:37:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 18:37:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 18:37:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:37:06 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 18:37:06 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 18:37:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:37:51 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 18:41:48 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 18:41:48 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 18:41:48 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 18:41:49 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 18:41:49 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 18:41:49 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 18:41:49 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 18:41:49 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@6aeb6c52 +2016-04-08 18:41:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:41:49 INFO ASLSession:352 - Logging the entrance +2016-04-08 18:41:49 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 18:41:49 DEBUG TemplateModel:83 - 2016-04-08 18:41:49, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 18:41:49 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:41:49 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 18:41:59 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 18:41:59 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 18:41:59 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:41:59 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 18:41:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:41:59 INFO ASLSession:352 - Logging the entrance +2016-04-08 18:41:59 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 18:41:59 DEBUG TemplateModel:83 - 2016-04-08 18:41:59, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 18:41:59 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:41:59 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 18:42:03 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 18:42:03 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 18:42:03 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 18:42:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:42:03 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:42:03 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:42:03 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:42:03 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 120 ms +2016-04-08 18:42:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 18:42:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 18:42:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 18:42:03 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 18:42:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 18:42:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 18:42:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 18:42:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 18:42:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 18:42:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 18:42:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 18:42:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 18:42:03 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 18:42:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 18:42:03 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 18:42:04 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:42:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:42:04 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@770417ac +2016-04-08 18:42:04 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@3ae8c1c6 +2016-04-08 18:42:04 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@3ef140e7 +2016-04-08 18:42:04 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@1f07ef9d +2016-04-08 18:42:04 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 254 ms +2016-04-08 18:42:04 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 18:42:04 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 18:42:04 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 18:42:04 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 18:42:04 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 18:42:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:42:04 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:42:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 18:42:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-08 18:42:04 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 18:42:04 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:42:04 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 18:42:04 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:42:10 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:42:10 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 18:42:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:42:13 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:42:13 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:42:13 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 18:42:13 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:42:13 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:42:14 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 18:42:14 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 18:42:14 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 18:42:14 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 18:42:14 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 18:42:14 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 18:42:14 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 18:42:14 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 18:42:14 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 18:42:14 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 18:42:14 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:42:14 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 18:42:14 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 18:42:14 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 18:42:14 DEBUG WPS2SM:201 - Schema: null +2016-04-08 18:42:14 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 18:42:14 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 18:42:14 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 18:42:14 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:42:14 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 18:42:14 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 18:42:14 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:42:14 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:42:14 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 18:42:14 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 18:42:14 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 18:42:14 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 18:42:14 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 18:42:14 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 18:42:14 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 18:42:14 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 18:42:14 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 18:42:14 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 18:42:14 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 18:42:14 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:42:14 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 18:42:14 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 18:42:14 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:42:14 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:42:14 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 18:42:14 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 18:42:14 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 18:42:14 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:42:14 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 18:42:14 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 18:42:14 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:42:14 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:42:14 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 18:42:14 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 18:42:14 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 18:42:14 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:42:14 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 18:42:14 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 18:42:14 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:42:14 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:42:14 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 18:42:14 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 18:42:14 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 18:42:14 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:42:14 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 18:42:14 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 18:42:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:42:54 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:43:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:43:49 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:44:42 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 18:44:42 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 18:44:42 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 18:44:42 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 18:44:42 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 18:44:42 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:44:42 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 18:44:42 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@21f1189c +2016-04-08 18:44:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:44:42 INFO ASLSession:352 - Logging the entrance +2016-04-08 18:44:42 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 18:44:42 DEBUG TemplateModel:83 - 2016-04-08 18:44:42, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 18:44:42 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:44:42 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 18:44:45 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 18:44:45 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 18:44:45 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 18:44:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:44:45 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:44:45 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:44:45 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:44:45 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 115 ms +2016-04-08 
18:44:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 18:44:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 18:44:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 18:44:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 18:44:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 18:44:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 18:44:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 18:44:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 18:44:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 18:44:46 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 18:44:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 18:44:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 18:44:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 18:44:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 18:44:46 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 18:44:46 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:44:46 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 18:44:46 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@1da4137d +2016-04-08 18:44:46 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@2e23e001 +2016-04-08 18:44:46 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@130ede12 +2016-04-08 18:44:46 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@281ee14f +2016-04-08 18:44:46 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 108 ms +2016-04-08 18:44:46 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 18:44:46 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 18:44:46 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 18:44:46 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 18:44:46 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 18:44:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:44:46 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:44:46 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 18:44:46 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 19 ms +2016-04-08 18:44:46 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 18:44:46 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:44:46 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 18:44:46 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:44:47 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:44:47 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 18:44:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:44:51 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:44:51 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:44:51 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 18:44:51 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:44:51 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:44:51 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 18:44:51 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 18:44:51 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 18:44:51 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 18:44:51 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 18:44:51 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 18:44:51 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 18:44:51 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 18:44:51 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 18:44:51 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 18:44:51 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:44:51 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 18:44:51 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 18:44:51 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 18:44:51 DEBUG WPS2SM:201 - Schema: null +2016-04-08 18:44:51 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 18:44:51 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 18:44:51 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 18:44:51 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:44:51 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 18:44:51 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 18:44:51 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:44:51 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:44:51 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 18:44:51 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 18:44:51 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 18:44:51 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 18:44:51 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 18:44:51 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 18:44:51 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 18:44:51 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 18:44:51 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 18:44:51 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 18:44:51 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 18:44:51 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:44:51 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 18:44:51 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 18:44:51 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:44:51 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:44:51 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 18:44:51 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 18:44:51 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 18:44:51 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:44:51 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 18:44:51 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 18:44:51 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:44:51 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:44:51 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 18:44:51 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 18:44:51 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 18:44:51 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:44:51 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 18:44:51 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 18:44:51 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:44:51 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:44:51 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 18:44:51 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 18:44:51 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 18:44:51 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:44:51 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 18:44:51 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 18:44:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 18:44:52 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 18:44:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 18:44:52 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:44:52 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:44:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:44:52 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:44:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:44:52 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:44:52 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:44:52 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 18:44:52 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 18:44:52 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 18:44:52 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 18:44:52 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 18:44:52 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 18:44:52 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 18:44:52 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 18:44:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 18:44:52 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:44:52 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 18:44:52 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 19 ms +2016-04-08 18:44:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:44:52 DEBUG ASLSession:458 - Getting security token: null in thread 33 
+2016-04-08 18:44:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:44:52 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:44:52 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:44:52 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 18:44:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:44:52 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:44:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:44:52 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:44:52 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:44:52 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 18:44:52 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 18:44:52 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 18:44:52 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 18:44:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:44:52 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:44:52 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 18:44:52 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 18:44:52 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:44:52 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:44:52 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:44:52 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 18:44:52 
DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:44:52 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:44:52 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:44:52 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:44:52 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 18:44:52 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:44:52 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 18:44:53 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 18:44:53 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 18:44:53 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 18:44:53 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 18:44:53 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 18:44:53 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 18:44:53 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 18:44:53 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:44:53 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:44:53 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:44:53 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:44:53 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:44:53 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:44:53 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:44:53 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:44:53 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:44:53 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 18:44:53 INFO JCRWorkspace:315 - Getting Workspace of 
user: giancarlo.panichi +2016-04-08 18:44:53 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:44:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:44:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:44:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 18:44:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:44:53 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:44:53 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:44:53 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:44:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:44:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:44:53 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:44:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:44:53 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 18:44:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 18:44:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:44:53 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:44:53 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 18:44:53 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 32 ms +2016-04-08 18:44:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 18:44:53 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:44:53 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 18:44:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 18:44:54 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 18:44:54 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 18:44:54 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 18:44:54 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 18:44:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 18:44:54 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 18:44:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 18:44:54 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:44:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 18:44:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 18:44:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 18:44:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 18:44:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 18:44:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:44:54 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 18:44:54 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 18:44:54 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 18:44:54 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 18:44:54 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 18:44:54 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 18:44:54 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 18:44:54 INFO WorkspaceExplorerServiceImpl:142 - end time - 459 msc 0 sec +2016-04-08 18:44:54 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 18:44:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:44:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:44:54 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:44:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:44:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:45:53 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 18:45:53 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 18:45:53 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-08 18:45:53 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 18:45:53 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 18:45:53 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:45:53 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 18:45:53 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@2c8768c9 +2016-04-08 18:45:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:45:53 INFO ASLSession:352 - Logging the entrance +2016-04-08 18:45:53 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 18:45:53 DEBUG TemplateModel:83 - 2016-04-08 18:45:53, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 18:45:53 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:45:53 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 18:45:56 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 18:45:56 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 18:45:56 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 18:45:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:45:56 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:45:56 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:45:56 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:45:56 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 126 ms +2016-04-08 18:45:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 18:45:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 18:45:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 18:45:56 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 18:45:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 18:45:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 18:45:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 18:45:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 18:45:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 18:45:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 18:45:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 18:45:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 18:45:56 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 18:45:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 18:45:56 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 18:45:56 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:45:56 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:45:56 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@38c30c6e +2016-04-08 18:45:56 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@e7092a4 +2016-04-08 18:45:56 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@54b5d460 +2016-04-08 18:45:56 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@3f14a497 +2016-04-08 18:45:56 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 91 ms +2016-04-08 18:45:57 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 18:45:57 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 18:45:57 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 18:45:57 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 18:45:57 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 18:45:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:45:57 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:45:57 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 18:45:57 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 24 ms +2016-04-08 18:45:57 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 18:45:57 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:45:57 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 18:45:57 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:45:58 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:45:58 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 18:46:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:46:01 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:46:01 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:46:01 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 18:46:01 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:46:01 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:46:02 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 18:46:02 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 18:46:02 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 18:46:02 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 18:46:02 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 18:46:02 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 18:46:02 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 18:46:02 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 18:46:02 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 18:46:02 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 18:46:02 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:46:02 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 18:46:02 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 18:46:02 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 18:46:02 DEBUG WPS2SM:201 - Schema: null +2016-04-08 18:46:02 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 18:46:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 18:46:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 18:46:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:46:02 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 18:46:02 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 18:46:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:46:02 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:46:02 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 18:46:02 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 18:46:02 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 18:46:02 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 18:46:02 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 18:46:02 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 18:46:02 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 18:46:02 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 18:46:02 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 18:46:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 18:46:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 18:46:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:46:02 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 18:46:02 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 18:46:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:46:02 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:46:02 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 18:46:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 18:46:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 18:46:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:46:02 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 18:46:02 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 18:46:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:46:02 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:46:02 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 18:46:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 18:46:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 18:46:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:46:02 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 18:46:02 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 18:46:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:46:02 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:46:02 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 18:46:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 18:46:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 18:46:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:46:02 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 18:46:02 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 18:46:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:46:02 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:46:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:46:02 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:46:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:46:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:46:02 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:46:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:46:02 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:46:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:46:02 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 18:46:02 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 18:46:02 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 18:46:02 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 18:46:02 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 18:46:02 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 18:46:02 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 18:46:02 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 18:46:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 18:46:02 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:46:02 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 18:46:02 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-08 18:46:02 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 18:46:02 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:46:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:46:02 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:46:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:46:02 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:46:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:46:02 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 18:46:02 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 18:46:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:46:02 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 18:46:02 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 18:46:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:46:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:46:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:46:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:46:02 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 18:46:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:46:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:46:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:46:02 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:46:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:46:02 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:46:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:46:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:46:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:46:02 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 18:46:02 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 18:46:02 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 18:46:02 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 18:46:02 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 18:46:02 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 18:46:02 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 18:46:02 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 18:46:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:46:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:46:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:46:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:46:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:46:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:46:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:46:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:46:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:46:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:46:02 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 18:46:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:46:03 INFO JCRServlets:238 - Calling Servlet 
GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 18:46:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:46:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:46:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:46:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:46:03 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:46:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:46:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:46:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:46:03 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:46:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:46:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:46:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:46:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:46:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:46:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:46:03 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 18:46:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 18:46:03 DEBUG 
DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:46:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:46:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 18:46:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 24 ms +2016-04-08 18:46:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 18:46:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:46:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 18:46:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 18:46:03 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 18:46:03 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 18:46:03 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 18:46:03 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 18:46:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 18:46:03 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 18:46:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 18:46:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:46:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 18:46:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-08 18:46:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 18:46:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 18:46:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 18:46:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:46:03 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 18:46:03 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 18:46:03 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 18:46:03 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 18:46:03 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 18:46:03 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 18:46:03 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 18:46:03 INFO WorkspaceExplorerServiceImpl:142 - end time - 431 msc 0 sec +2016-04-08 18:46:03 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 18:46:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:46:48 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:47:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:47:43 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:47:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:47:45 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:47:45 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:47:45 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-08 18:47:45 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:47:45 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:47:46 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 18:47:46 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 18:47:46 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 18:47:46 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 18:47:46 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 18:47:46 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-08 18:47:46 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-08 18:47:46 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-08 18:47:46 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-08 18:47:46 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 18:47:46 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 18:47:46 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 18:47:46 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:47:46 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 18:47:46 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 18:47:46 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 18:47:46 DEBUG WPS2SM:201 - Schema: null +2016-04-08 18:47:46 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 18:47:46 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 18:47:46 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 18:47:46 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:47:46 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 18:47:46 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 18:47:46 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:47:46 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:47:46 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 18:47:46 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 18:47:46 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 18:47:46 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 18:47:46 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 18:47:46 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 18:47:46 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 18:47:46 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 18:47:46 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 18:47:46 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 18:47:46 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 18:47:46 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:47:46 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 18:47:46 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 18:47:46 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:47:46 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:47:46 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 18:47:46 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 18:47:46 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 18:47:46 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:47:46 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 18:47:46 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-08 18:47:46 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:47:46 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:47:46 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-08 18:47:46 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-08 18:47:46 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-08 18:47:46 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:47:46 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-08 18:47:46 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-08 18:47:46 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:47:46 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:47:46 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 18:47:46 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-08 18:47:46 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-08 18:47:46 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:47:46 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 18:47:46 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-08 18:47:46 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:47:46 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:47:46 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-08 18:47:46 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-08 18:47:46 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-08 18:47:46 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:47:46 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-08 18:47:46 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 18:47:46 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:47:46 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:47:46 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 18:47:46 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-08 18:47:46 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 18:47:46 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:47:46 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 18:47:46 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 18:47:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:47:46 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:47:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:47:46 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:47:46 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:47:46 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:47:46 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:47:46 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:47:46 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:47:46 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:47:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:47:46 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 18:47:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:47:46 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:47:46 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:47:46 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:47:46 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:47:46 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:47:46 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:47:46 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 18:47:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 18:47:46 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 18:47:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 18:47:46 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:47:46 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:47:46 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:47:46 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:47:46 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:47:46 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:47:46 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:47:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:47:46 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:47:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:47:46 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:47:46 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:47:46 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:47:46 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:47:46 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:47:46 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:47:46 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:47:46 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:47:46 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:47:46 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 18:47:46 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:47:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:47:46 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:47:46 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:47:46 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:47:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:47:46 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:47:46 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:47:46 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:47:46 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:47:46 INFO JCRServlets:238 - Calling Servlet GetItemByPath 
/Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:47:46 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:47:46 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:47:46 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 18:47:46 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 18:47:46 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 18:47:46 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 18:47:46 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 18:47:46 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 18:47:46 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 18:47:46 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 18:47:46 INFO WorkspaceExplorerServiceImpl:142 - end time - 254 msc 0 sec +2016-04-08 18:47:46 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 18:47:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:47:54 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 18:47:54 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:47:54 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS +2016-04-08 18:47:54 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:47:54 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:47:54 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS + XMEANS + A clustering algorithm for 
occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + + + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + + + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. 
number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 18:47:54 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 18:47:54 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 18:47:54 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 18:47:54 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. 
table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 18:47:54 DEBUG SClient4WPS:290 - WPSClient->Input: + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + +2016-04-08 18:47:54 DEBUG SClient4WPS:290 - WPSClient->Input: + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + +2016-04-08 18:47:54 DEBUG SClient4WPS:290 - WPSClient->Input: + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + +2016-04-08 18:47:54 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-08 18:47:54 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 18:47:54 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 18:47:54 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 18:47:54 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:47:54 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 18:47:54 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 18:47:54 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 18:47:54 DEBUG WPS2SM:201 - Schema: null +2016-04-08 18:47:54 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 18:47:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 18:47:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 18:47:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:47:54 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-08 18:47:54 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 18:47:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:47:54 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:47:54 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 18:47:54 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-08 18:47:54 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 18:47:54 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 18:47:54 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 18:47:54 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 18:47:54 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 18:47:54 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 18:47:54 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 18:47:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 18:47:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 18:47:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:47:54 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 18:47:54 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 18:47:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:47:54 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:47:54 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 18:47:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 18:47:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 18:47:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:47:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 18:47:54 DEBUG WPS2SM:254 - Conversion to SM Type->maxIterations is a Literal Input +2016-04-08 18:47:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:47:54 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:47:54 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 18:47:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:XMeans max number of overall iterations of the clustering learning +2016-04-08 18:47:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxIterations +2016-04-08 18:47:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:47:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 18:47:54 DEBUG WPS2SM:254 - Conversion to SM Type->minClusters is a Literal Input +2016-04-08 18:47:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:47:54 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:47:54 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 18:47:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:minimum number of expected clusters +2016-04-08 18:47:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minClusters +2016-04-08 18:47:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:47:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 18:47:54 DEBUG WPS2SM:254 - Conversion to SM Type->maxClusters is a Literal Input +2016-04-08 18:47:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:47:54 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:47:54 DEBUG WPS2SM:101 - Guessed default value: 50 +2016-04-08 18:47:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of clusters to produce +2016-04-08 18:47:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxClusters +2016-04-08 18:47:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:47:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. 
of Entries:1; default:50], typology=OBJECT] +2016-04-08 18:47:54 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 18:47:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:47:54 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:47:54 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 18:47:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-08 18:47:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 18:47:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:47:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 18:47:54 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. of Entries:1; default:50], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 18:47:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:47:55 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:47:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:47:55 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:47:55 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:47:55 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:47:55 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:47:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:47:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:47:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:47:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:47:55 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:47:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:47:55 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:47:55 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:47:55 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:47:55 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:47:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:47:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:47:55 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 18:47:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:47:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 18:47:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:47:55 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:47:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:47:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:47:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:47:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:47:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:47:55 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:47:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:47:55 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:47:55 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:47:55 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:47:55 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:47:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:47:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:47:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:47:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 18:47:55 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 18:47:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 18:47:55 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:47:55 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:47:55 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:47:55 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:47:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:47:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:47:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:47:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:47:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:47:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:47:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 18:47:55 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 18:47:55 INFO JCRWorkspace:315 - Getting Workspace 
of user: giancarlo.panichi +2016-04-08 18:47:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:47:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:47:55 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 18:47:55 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 18:47:55 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 18:47:55 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 18:47:55 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 18:47:55 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 18:47:55 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 18:47:55 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 18:47:55 INFO WorkspaceExplorerServiceImpl:142 - end time - 191 msc 0 sec +2016-04-08 18:47:55 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 18:48:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:48:38 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:49:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:49:33 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:50:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:50:28 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:51:41 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 18:51:41 DEBUG AccessLogger:44 - Constructing a new access logger. 
Create a new file if it does not exist for the current date +2016-04-08 18:51:41 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 18:51:41 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 18:51:41 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 18:51:41 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:51:41 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 18:51:41 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@21a264cf +2016-04-08 18:51:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:51:41 INFO ASLSession:352 - Logging the entrance +2016-04-08 18:51:41 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 18:51:41 DEBUG TemplateModel:83 - 2016-04-08 18:51:41, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 18:51:41 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:51:41 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 18:51:46 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 18:51:46 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 18:51:46 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 18:51:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:51:46 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:51:46 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:51:46 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:51:46 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 122 ms +2016-04-08 18:51:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 18:51:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 18:51:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 18:51:46 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 18:51:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 18:51:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 18:51:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 18:51:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 18:51:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 18:51:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 18:51:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 18:51:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 18:51:46 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 18:51:46 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 18:51:46 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 18:51:46 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:51:46 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:51:46 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@780650be +2016-04-08 18:51:46 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@2f10ec57 +2016-04-08 18:51:46 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@70827d27 +2016-04-08 18:51:46 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@30933b6d +2016-04-08 18:51:46 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 120 ms +2016-04-08 18:51:46 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 18:51:47 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 18:51:47 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 18:51:47 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 18:51:47 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 18:51:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:51:47 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:51:47 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 18:51:47 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-08 18:51:47 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 18:51:47 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:51:47 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 18:51:47 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:51:48 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:51:48 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 18:51:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:51:52 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:51:52 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:51:52 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 18:51:52 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:51:52 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:51:52 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 18:51:52 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 18:51:52 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 18:51:52 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 18:51:52 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 18:51:52 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 18:51:52 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 18:51:52 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 18:51:52 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 18:51:52 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 18:51:52 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:51:52 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 18:51:52 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 18:51:52 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 18:51:52 DEBUG WPS2SM:201 - Schema: null +2016-04-08 18:51:52 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 18:51:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 18:51:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 18:51:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:51:52 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 18:51:52 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 18:51:52 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:51:52 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:51:52 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 18:51:52 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 18:51:52 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 18:51:52 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 18:51:52 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 18:51:52 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 18:51:52 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 18:51:52 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 18:51:52 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 18:51:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 18:51:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 18:51:53 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:51:53 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 18:51:53 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 18:51:53 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:51:53 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:51:53 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 18:51:53 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 18:51:53 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 18:51:53 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:51:53 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 18:51:53 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 18:51:53 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:51:53 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:51:53 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 18:51:53 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 18:51:53 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 18:51:53 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:51:53 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 18:51:53 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 18:51:53 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:51:53 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:51:53 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 18:51:53 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 18:51:53 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 18:51:53 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:51:53 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 18:51:53 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 18:51:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:51:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:51:53 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:51:53 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:51:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:51:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:51:53 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:51:53 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:51:53 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:51:53 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:51:53 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 18:51:53 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 18:51:53 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 18:51:53 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 18:51:53 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 18:51:53 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 18:51:53 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 18:51:53 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 18:51:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 18:51:53 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:51:53 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 18:51:53 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 66 ms +2016-04-08 18:51:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:51:53 DEBUG ASLSession:458 - Getting security token: null in thread 36 
+2016-04-08 18:51:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:51:53 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 18:51:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:51:53 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:51:53 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:51:53 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 18:51:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:51:53 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 18:51:53 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:51:53 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:51:53 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 18:51:53 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 18:51:53 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 18:51:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:51:53 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:51:53 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 18:51:53 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 18:51:53 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:51:53 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:51:53 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:51:53 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. 
+2016-04-08 18:51:53 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:51:53 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 18:51:53 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:51:53 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:51:53 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:51:53 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:51:53 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 18:51:53 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 18:51:54 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 18:51:54 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 18:51:54 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 18:51:54 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 18:51:54 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 18:51:54 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 18:51:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:51:54 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:51:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:51:54 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:51:54 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 18:51:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:51:54 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:51:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:51:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:51:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi 
+2016-04-08 18:51:54 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:51:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:51:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:51:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:51:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:51:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 18:51:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:51:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:51:54 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 18:51:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:51:54 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:51:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:51:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:51:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:51:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:51:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:51:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:51:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:51:54 INFO JCRServlets:142 - 
Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 18:51:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 18:51:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:51:54 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:51:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 18:51:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 28 ms +2016-04-08 18:51:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 18:51:54 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:51:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 18:51:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where 
($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 18:51:54 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-08 18:51:54 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 18:51:54 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 18:51:54 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 18:51:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 18:51:54 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 18:51:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 18:51:54 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:51:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 18:51:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 18:51:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 18:51:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 18:51:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 18:51:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:51:54 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 
18:51:54 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 18:51:54 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 18:51:54 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 18:51:54 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 18:51:54 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 18:51:54 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 18:51:54 INFO WorkspaceExplorerServiceImpl:142 - end time - 451 msc 0 sec +2016-04-08 18:51:54 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 18:52:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:52:36 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:53:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:53:31 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:54:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:54:26 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 18:55:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:55:21 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:56:43 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 18:56:43 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 18:56:43 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-08 18:56:43 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 18:56:43 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 18:56:43 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:56:43 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 18:56:43 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@ef24c32 +2016-04-08 18:56:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:56:43 INFO ASLSession:352 - Logging the entrance +2016-04-08 18:56:43 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 18:56:43 DEBUG TemplateModel:83 - 2016-04-08 18:56:43, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 18:56:43 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:56:43 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 18:56:47 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 18:56:47 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 18:56:47 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 18:56:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:56:47 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:56:47 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:56:47 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:56:47 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 119 ms +2016-04-08 18:56:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 18:56:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 18:56:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 18:56:47 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 18:56:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 18:56:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 18:56:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 18:56:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 18:56:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 18:56:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 18:56:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 18:56:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 18:56:47 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 18:56:47 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 18:56:47 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 18:56:47 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:56:47 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:56:47 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@524fbbe6 +2016-04-08 18:56:47 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@1732b5cb +2016-04-08 18:56:47 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@2dd2b8d2 +2016-04-08 18:56:47 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@16924b38 +2016-04-08 18:56:47 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 108 ms +2016-04-08 18:56:48 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 18:56:48 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 18:56:48 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 18:56:48 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 18:56:48 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 18:56:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:56:48 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:56:48 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 18:56:48 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 25 ms +2016-04-08 18:56:48 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 18:56:48 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:56:48 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 18:56:48 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:56:49 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:56:49 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 18:56:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 18:56:52 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 18:56:52 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:56:52 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 18:56:52 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:56:52 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:56:52 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 18:56:52 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 18:56:52 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 18:56:52 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 18:56:52 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 18:56:52 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 18:56:52 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 18:56:52 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 18:56:52 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 18:56:52 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 18:56:52 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:56:52 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 18:56:52 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 18:56:52 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 18:56:52 DEBUG WPS2SM:201 - Schema: null +2016-04-08 18:56:52 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 18:56:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 18:56:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 18:56:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:56:52 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 18:56:52 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 18:56:52 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:56:52 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:56:52 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 18:56:52 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 18:56:52 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 18:56:52 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 18:56:52 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 18:56:52 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 18:56:52 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 18:56:52 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 18:56:52 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 18:56:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 18:56:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 18:56:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:56:52 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 18:56:52 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 18:56:52 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:56:52 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:56:52 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 18:56:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 18:56:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 18:56:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:56:52 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 18:56:52 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 18:56:52 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:56:52 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:56:52 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 18:56:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 18:56:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 18:56:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:56:52 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 18:56:52 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 18:56:52 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:56:52 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:56:52 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 18:56:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 18:56:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 18:56:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:56:52 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 18:56:52 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 18:56:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:56:52 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 18:56:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:56:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:56:52 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:56:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:56:52 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:56:52 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:56:52 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:56:52 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:56:52 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 18:56:52 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 18:56:52 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 18:56:52 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 18:56:52 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 18:56:52 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 18:56:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 18:56:52 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 18:56:52 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 18:56:52 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:56:52 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 18:56:52 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 77 ms +2016-04-08 18:56:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:56:53 DEBUG ASLSession:458 - Getting security token: null in thread 32 
+2016-04-08 18:56:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:56:53 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:56:53 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:56:53 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 18:56:53 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 18:56:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:56:53 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 18:56:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:56:53 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:56:53 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:56:53 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 18:56:53 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 18:56:53 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 18:56:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:56:53 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:56:53 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 18:56:53 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:56:53 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. 
+2016-04-08 18:56:53 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:56:53 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 18:56:53 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:56:53 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:56:53 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 18:56:53 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:56:53 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:56:53 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:56:53 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:56:53 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 18:56:53 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 18:56:53 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 18:56:53 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 18:56:53 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 18:56:53 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 18:56:53 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 18:56:53 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 18:56:53 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:56:53 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:56:53 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:56:53 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:56:53 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:56:53 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:56:53 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:56:53 DEBUG 
JCRHomeManager:97 - User is already logged +2016-04-08 18:56:53 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:56:53 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:56:53 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:56:53 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 18:56:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 18:56:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:56:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:56:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:56:53 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:56:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:56:53 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 18:56:53 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:56:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:56:53 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:56:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:56:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:56:53 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:56:53 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:56:53 INFO JCRServlets:238 - Calling 
Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:56:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:56:53 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 18:56:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 18:56:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:56:53 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:56:53 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 18:56:53 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 31 ms +2016-04-08 18:56:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 18:56:53 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:56:53 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 18:56:53 
INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 18:56:53 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-08 18:56:53 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 18:56:53 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 18:56:53 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 18:56:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 18:56:53 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 18:56:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 18:56:53 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:56:53 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 18:56:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-08 18:56:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 18:56:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 18:56:54 DEBUG 
DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 18:56:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:56:54 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 18:56:54 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 18:56:54 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 18:56:54 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 18:56:54 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 18:56:54 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 18:56:54 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 18:56:54 INFO WorkspaceExplorerServiceImpl:142 - end time - 422 msc 0 sec +2016-04-08 18:56:54 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 18:57:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:57:38 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:58:46 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 18:58:46 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 18:58:46 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-08 18:58:46 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 18:58:46 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 18:58:46 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 18:58:46 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 18:58:46 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@7d1cb4b +2016-04-08 18:58:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:58:46 INFO ASLSession:352 - Logging the entrance +2016-04-08 18:58:46 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 18:58:46 DEBUG TemplateModel:83 - 2016-04-08 18:58:46, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 18:58:46 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:58:46 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 18:58:51 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 18:58:51 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 18:58:51 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 18:58:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:58:51 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:58:51 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:58:51 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:58:51 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 130 ms +2016-04-08 18:58:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 18:58:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 18:58:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 18:58:51 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 18:58:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 18:58:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 18:58:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 18:58:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 18:58:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 18:58:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 18:58:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 18:58:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 18:58:51 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 18:58:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 18:58:51 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 18:58:51 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:58:51 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 18:58:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@2c0b3eb2 +2016-04-08 18:58:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@7ed7a3a6 +2016-04-08 18:58:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@7a3e8b2f +2016-04-08 18:58:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@2da4bf0b +2016-04-08 18:58:51 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 105 ms +2016-04-08 18:58:51 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 18:58:52 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 18:58:52 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 18:58:52 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 18:58:52 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 18:58:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:58:52 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:58:52 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 18:58:52 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-08 18:58:52 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 18:58:52 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:58:52 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 18:58:52 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:58:52 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:58:52 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 18:58:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:58:56 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:58:56 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:58:56 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 18:58:56 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:58:56 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:58:57 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 18:58:57 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 18:58:57 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 18:58:57 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 18:58:57 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 18:58:57 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 18:58:57 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 18:58:57 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 18:58:57 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 18:58:57 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 18:58:57 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:58:57 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 18:58:57 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 18:58:57 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 18:58:57 DEBUG WPS2SM:201 - Schema: null +2016-04-08 18:58:57 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 18:58:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 18:58:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 18:58:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:58:57 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 18:58:57 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 18:58:57 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:58:57 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:58:57 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 18:58:57 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 18:58:57 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 18:58:57 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 18:58:57 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 18:58:57 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 18:58:57 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 18:58:57 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 18:58:57 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 18:58:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 18:58:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 18:58:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:58:57 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 18:58:57 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 18:58:57 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:58:57 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:58:57 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 18:58:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 18:58:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 18:58:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:58:57 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 18:58:57 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 18:58:57 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:58:57 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:58:57 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 18:58:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 18:58:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 18:58:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:58:57 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 18:58:57 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 18:58:57 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:58:57 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:58:57 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 18:58:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 18:58:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 18:58:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:58:57 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 18:58:57 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 18:58:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:58:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:58:57 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 18:58:57 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:58:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:58:57 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:58:57 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:58:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:58:57 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:58:57 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:58:57 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 18:58:57 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 18:58:57 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 18:58:57 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 18:58:57 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 18:58:57 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 18:58:57 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 18:58:57 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 18:58:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 18:58:57 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:58:57 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 18:58:57 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 99 ms +2016-04-08 18:58:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:58:57 DEBUG ASLSession:458 - Getting security token: null in thread 36 
+2016-04-08 18:58:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:58:57 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:58:57 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:58:57 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 18:58:57 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 18:58:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:58:57 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:58:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:58:57 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:58:57 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:58:57 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 18:58:57 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 18:58:57 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 18:58:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:58:57 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:58:57 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 18:58:57 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 18:58:57 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:58:57 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:58:57 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 18:58:57 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:58:57 INFO 
JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:58:57 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:58:57 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:58:57 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 18:58:57 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:58:57 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:58:57 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 18:58:58 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 18:58:58 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 18:58:58 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 18:58:58 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 18:58:58 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 18:58:58 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 18:58:58 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 18:58:58 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:58:58 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:58:58 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:58:58 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:58:58 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:58:58 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:58:58 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:58:58 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:58:58 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 18:58:58 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:58:58 INFO JCRWorkspace:315 - Getting 
Workspace of user: giancarlo.panichi +2016-04-08 18:58:58 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:58:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 18:58:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:58:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:58:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:58:58 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:58:58 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:58:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:58:58 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:58:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:58:58 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:58:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:58:58 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:58:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:58:58 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:58:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:58:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:58:58 INFO JCRServlets:142 - Calling servlet getChildrenById 
efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 18:58:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 18:58:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:58:58 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:58:58 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 18:58:58 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 34 ms +2016-04-08 18:58:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 18:58:58 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:58:58 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 18:58:58 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and 
$resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 18:58:58 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-08 18:58:58 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 18:58:58 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 18:58:58 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 18:58:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 18:58:58 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 18:58:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 18:58:58 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 18:58:58 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 18:58:58 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-08 18:58:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 18:58:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 18:58:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 18:58:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:58:58 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 18:58:58 DEBUG ItemBuilder:108 - Is shared folder: 
Cotrix test +2016-04-08 18:58:58 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 18:58:58 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 18:58:58 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 18:58:58 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 18:58:58 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 18:58:58 INFO WorkspaceExplorerServiceImpl:142 - end time - 437 msc 0 sec +2016-04-08 18:58:58 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 18:59:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:59:09 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 18:59:09 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 18:59:09 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-08 18:59:09 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:59:09 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 18:59:09 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 18:59:09 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 18:59:09 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 18:59:09 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 18:59:09 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 18:59:09 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-08 18:59:09 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-08 18:59:09 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-08 18:59:09 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-08 18:59:09 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 18:59:09 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 18:59:09 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 18:59:09 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 18:59:09 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 18:59:09 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 18:59:09 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 18:59:09 DEBUG WPS2SM:201 - Schema: null +2016-04-08 18:59:09 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 18:59:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 18:59:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 18:59:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:59:09 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 18:59:09 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 18:59:09 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:59:09 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:59:09 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 18:59:09 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 18:59:09 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 18:59:09 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 18:59:09 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 18:59:09 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 18:59:09 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 18:59:09 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 18:59:09 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 18:59:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 18:59:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 18:59:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:59:09 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 18:59:09 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 18:59:09 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:59:09 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 18:59:09 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 18:59:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 18:59:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 18:59:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:59:09 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 18:59:09 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-08 18:59:09 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:59:09 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:59:09 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-08 18:59:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-08 18:59:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-08 18:59:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:59:09 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-08 18:59:09 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-08 18:59:09 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:59:09 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:59:09 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 18:59:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-08 18:59:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-08 18:59:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:59:09 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 18:59:09 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-08 18:59:09 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:59:09 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:59:09 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-08 18:59:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-08 18:59:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-08 18:59:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:59:09 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-08 18:59:09 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 18:59:09 DEBUG WPS2SM:93 - WPS type: +2016-04-08 18:59:09 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 18:59:09 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 18:59:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-08 18:59:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 18:59:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 18:59:09 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 18:59:09 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 18:59:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:59:09 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 18:59:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:59:09 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:59:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:59:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:59:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:59:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:59:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:59:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:59:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:59:09 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 18:59:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 18:59:09 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:59:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:59:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:59:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:59:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:59:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:59:09 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 18:59:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:59:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 18:59:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:59:09 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 18:59:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 18:59:09 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:59:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 18:59:09 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 18:59:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:59:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:59:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:59:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:59:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:59:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:59:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:59:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:59:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:59:09 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:59:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:59:09 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 18:59:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 18:59:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 18:59:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 18:59:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 18:59:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 18:59:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:59:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:59:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:59:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:59:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:59:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:59:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 18:59:09 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 18:59:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 18:59:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:59:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 18:59:09 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 18:59:09 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 18:59:09 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 18:59:09 DEBUG ItemBuilder:108 - Is 
shared folder: GPImg +2016-04-08 18:59:09 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 18:59:09 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 18:59:09 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 18:59:09 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 18:59:09 INFO WorkspaceExplorerServiceImpl:142 - end time - 195 msc 0 sec +2016-04-08 18:59:09 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 18:59:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 18:59:41 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 19:00:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:00:36 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:01:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:01:31 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:02:14 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 19:02:14 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 19:02:14 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-08 19:02:14 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 19:02:14 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 19:02:14 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:02:14 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 19:02:14 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@7871c4d9 +2016-04-08 19:02:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:02:14 INFO ASLSession:352 - Logging the entrance +2016-04-08 19:02:14 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 19:02:14 DEBUG TemplateModel:83 - 2016-04-08 19:02:14, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 19:02:14 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:02:14 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 19:02:19 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 19:02:19 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 19:02:19 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 19:02:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:02:19 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:02:19 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:02:19 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:02:20 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 122 ms +2016-04-08 19:02:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 19:02:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 19:02:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 19:02:20 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 19:02:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 19:02:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 19:02:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 19:02:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 19:02:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 19:02:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 19:02:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 19:02:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 19:02:20 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 19:02:20 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 19:02:20 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 19:02:20 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:02:20 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:02:20 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@42c5461c +2016-04-08 19:02:20 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@56c77c9a +2016-04-08 19:02:20 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@71b78aab +2016-04-08 19:02:20 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@4f35bb56 +2016-04-08 19:02:20 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 129 ms +2016-04-08 19:02:20 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 19:02:20 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 19:02:20 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 19:02:20 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 19:02:20 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 19:02:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:02:20 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:02:20 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 19:02:20 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 34 ms +2016-04-08 19:02:20 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 19:02:20 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:02:20 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 19:02:20 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:02:21 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:02:21 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 19:02:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:02:25 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:02:25 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:02:25 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 19:02:25 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:02:25 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:02:25 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:02:25 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:02:25 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:02:25 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:02:25 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:02:25 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 19:02:25 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 19:02:25 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:02:25 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:02:25 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:02:25 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:02:25 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:02:25 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:02:25 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:02:25 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:02:25 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:02:25 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:02:25 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:02:25 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:02:25 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:02:25 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:02:25 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:02:25 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:02:25 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:02:25 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 19:02:25 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:02:25 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:02:25 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:02:25 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:02:25 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:02:25 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:02:25 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:02:25 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:02:25 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:02:25 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:02:25 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:02:25 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:02:25 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:02:25 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:02:25 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:02:25 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:02:25 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:02:25 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:02:25 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:02:25 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 19:02:25 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:02:25 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:02:25 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:02:25 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 19:02:25 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 19:02:25 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:02:25 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:02:25 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:02:25 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:02:25 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:02:25 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 19:02:25 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 19:02:25 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:02:25 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:02:25 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 19:02:25 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 19:02:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:02:25 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:02:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:02:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:02:25 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 19:02:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:02:25 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:02:25 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:02:25 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:02:25 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:02:25 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:02:25 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:02:25 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:02:25 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 19:02:25 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 19:02:25 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 19:02:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 19:02:25 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:02:25 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:02:25 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:02:25 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 19:02:25 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-08 19:02:26 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 19:02:26 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:02:26 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:02:26 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:02:26 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:02:26 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:02:26 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 19:02:26 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:02:26 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:02:26 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 19:02:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:02:26 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:02:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:02:26 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:02:26 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:02:26 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:02:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:02:26 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:02:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:02:26 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:02:26 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:02:26 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:02:26 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:02:26 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:02:26 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:02:26 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:02:26 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 19:02:26 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:02:26 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:02:26 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 19:02:26 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 19:02:26 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:02:26 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:02:26 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:02:26 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:02:26 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:02:26 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:02:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:02:26 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:02:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:02:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:02:26 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:02:26 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:02:26 INFO JCRServlets:238 - Calling 
Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:02:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:02:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:02:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:02:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:02:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:02:26 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:02:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:02:26 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:02:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:02:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:02:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:02:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:02:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:02:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:02:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:02:26 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:02:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 19:02:26 DEBUG 
DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:02:26 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:02:26 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:02:26 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 29 ms +2016-04-08 19:02:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 19:02:26 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:02:26 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 19:02:26 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-08 19:02:26 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 19:02:26 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 19:02:26 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 19:02:26 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 19:02:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 19:02:26 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 19:02:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 19:02:26 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:02:26 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:02:26 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 15 ms +2016-04-08 19:02:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 19:02:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 19:02:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 19:02:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:02:27 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:02:27 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:02:27 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:02:27 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 19:02:27 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:02:27 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:02:27 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:02:27 INFO WorkspaceExplorerServiceImpl:142 - end time - 425 msc 0 sec +2016-04-08 19:02:27 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:03:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:03:09 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:05:09 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 19:05:09 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 19:05:09 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 19:05:09 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 19:05:09 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 19:05:09 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:05:09 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 19:05:09 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@176fe1ff +2016-04-08 19:05:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:05:09 INFO ASLSession:352 - Logging the entrance +2016-04-08 19:05:09 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 19:05:09 DEBUG TemplateModel:83 - 2016-04-08 19:05:09, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 19:05:09 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:05:09 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 19:05:13 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 19:05:13 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 19:05:13 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 19:05:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:05:13 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:05:13 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:05:13 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:05:13 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 124 ms +2016-04-08 
19:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 19:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 19:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 19:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 19:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 19:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 19:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 19:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 19:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 19:05:13 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 19:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 19:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 19:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 19:05:13 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 19:05:13 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 19:05:13 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:05:13 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 19:05:13 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@23422f65 +2016-04-08 19:05:13 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@144e8b6d +2016-04-08 19:05:13 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@659caeb9 +2016-04-08 19:05:13 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@6982cbcc +2016-04-08 19:05:13 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 126 ms +2016-04-08 19:05:14 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 19:05:14 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 19:05:14 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 19:05:14 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 19:05:14 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 19:05:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:05:14 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:05:14 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 19:05:14 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 27 ms +2016-04-08 19:05:14 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 19:05:14 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:05:14 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 19:05:14 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:05:14 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:05:14 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 19:05:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:05:19 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:05:19 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:05:19 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 19:05:19 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:05:19 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:05:19 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:05:19 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:05:19 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:05:19 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:05:19 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:05:19 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 19:05:19 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 19:05:19 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:05:19 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:05:19 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:05:19 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:05:19 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:05:19 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:05:19 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:05:19 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:05:19 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:05:19 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:05:19 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:05:19 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:05:19 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:05:19 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:05:19 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:05:19 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:05:19 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:05:19 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 19:05:19 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:05:19 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:05:19 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:05:19 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:05:19 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:05:19 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:05:19 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:05:19 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:05:19 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:05:19 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:05:19 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:05:19 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:05:19 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:05:19 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:05:19 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:05:19 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:05:19 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:05:19 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:05:19 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:05:19 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 19:05:19 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:05:19 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:05:19 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:05:19 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 19:05:19 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 19:05:19 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:05:19 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:05:19 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:05:19 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:05:19 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:05:19 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 19:05:19 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 19:05:19 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:05:19 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:05:19 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 19:05:19 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 19:05:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:05:19 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:05:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:05:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:05:19 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:05:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:05:19 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:05:19 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:05:19 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:05:19 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:05:19 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:05:19 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:05:19 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:05:19 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:05:19 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 19:05:19 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 19:05:19 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 19:05:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 19:05:19 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:05:19 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:05:19 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 19:05:19 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 19 ms +2016-04-08 19:05:20 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 19:05:20 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:05:20 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:05:20 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:05:20 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 19:05:20 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:05:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:05:20 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 19:05:20 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:05:20 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:05:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:05:20 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:05:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:05:20 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:05:20 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:05:20 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:05:20 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:05:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:05:20 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:05:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:05:20 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:05:20 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:05:20 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:05:20 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:05:20 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:05:20 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:05:20 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 19:05:20 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:05:20 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:05:20 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 19:05:20 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 19:05:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:05:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:05:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:05:20 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:05:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:05:20 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:05:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:05:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:05:20 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:05:20 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:05:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:05:20 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:05:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by 
giancarlo.panichi +2016-04-08 19:05:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:05:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:05:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:05:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:05:20 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:05:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:05:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:05:20 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:05:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:05:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:05:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:05:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:05:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:05:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:05:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:05:20 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:05:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:05:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 
19:05:20 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:05:20 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:05:20 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 30 ms +2016-04-08 19:05:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:05:20 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:05:20 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 19:05:20 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-08 19:05:20 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 19:05:20 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 19:05:20 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 19:05:20 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 19:05:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:05:20 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 19:05:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:05:20 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:05:20 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:05:20 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 18 ms +2016-04-08 19:05:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:05:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:05:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:05:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:05:20 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:05:20 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:05:20 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:05:20 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 19:05:20 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:05:20 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:05:21 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:05:21 INFO WorkspaceExplorerServiceImpl:142 - end time - 423 msc 0 sec +2016-04-08 19:05:21 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:06:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:06:04 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:06:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:06:06 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:06:06 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:06:06 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-08 19:06:06 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:06:06 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:06:07 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. 
Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:06:07 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:06:07 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:06:07 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:06:07 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:06:07 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-08 19:06:07 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-08 19:06:07 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-08 19:06:07 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-08 19:06:07 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:06:07 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:06:07 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:06:07 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:06:07 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:06:07 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:06:07 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:06:07 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:06:07 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:06:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:06:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:06:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:06:07 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:06:07 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:06:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:06:07 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:06:07 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:06:07 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 19:06:07 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:06:07 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:06:07 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:06:07 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:06:07 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:06:07 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:06:07 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:06:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:06:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:06:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:06:07 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:06:07 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:06:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:06:07 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:06:07 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:06:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:06:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:06:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:06:07 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:06:07 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-08 19:06:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:06:07 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:06:07 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-08 19:06:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-08 19:06:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-08 19:06:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:06:07 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-08 19:06:07 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-08 19:06:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:06:07 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:06:07 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:06:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-08 19:06:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-08 19:06:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:06:07 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:06:07 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-08 19:06:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:06:07 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:06:07 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-08 19:06:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-08 19:06:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-08 19:06:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:06:07 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-08 19:06:07 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:06:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:06:07 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:06:07 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 19:06:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-08 19:06:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:06:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:06:07 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 19:06:07 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 19:06:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:06:07 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:06:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:06:07 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:06:07 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:06:07 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:06:07 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:06:07 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:06:07 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:06:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:06:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:06:07 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:06:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:06:07 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:06:07 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:06:07 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:06:07 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:06:07 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:06:07 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:06:07 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:06:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:06:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:06:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:06:07 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:06:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:06:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:06:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:06:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:06:07 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:06:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:06:07 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:06:07 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:06:07 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:06:07 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:06:07 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:06:07 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:06:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:06:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:06:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:06:07 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:06:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:06:07 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:06:07 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:06:07 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:06:07 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:06:07 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:06:07 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:06:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:06:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:06:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:06:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:06:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:06:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:06:07 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:06:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:06:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:06:07 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:06:07 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:06:07 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:06:07 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:06:07 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:06:07 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:06:07 DEBUG ItemBuilder:108 - Is shared folder: 
StatisticalAlgorithmsImporter +2016-04-08 19:06:07 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:06:07 INFO WorkspaceExplorerServiceImpl:142 - end time - 215 msc 0 sec +2016-04-08 19:06:07 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:06:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:06:12 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:06:12 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:06:12 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF +2016-04-08 19:06:12 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:06:12 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:06:12 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF + LOF + Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed. + + + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + + + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + + + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + + + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + + + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + + + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:06:12 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:06:12 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:06:12 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + +2016-04-08 19:06:12 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + +2016-04-08 19:06:12 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. 
locality (usually called k): minimal number of nearest neighbors + + + + 2 + + +2016-04-08 19:06:12 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + +2016-04-08 19:06:12 DEBUG SClient4WPS:290 - WPSClient->Input: + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + +2016-04-08 19:06:12 DEBUG SClient4WPS:290 - WPSClient->Input: + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + +2016-04-08 19:06:12 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:06:12 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:06:12 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:06:12 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:06:12 DEBUG WPS2SM:279 - Conversion to SM Type->PointsTable is a Complex Input +2016-04-08 19:06:12 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:06:12 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:06:12 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:06:12 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:06:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:06:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsTable +2016-04-08 19:06:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:06:12 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-08 19:06:12 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:06:12 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:06:12 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:06:12 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:06:12 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-08 19:06:12 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:06:12 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from PointsTable separated by | +2016-04-08 19:06:12 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:06:12 DEBUG WPS2SM:115 - Machter end: 93 +2016-04-08 19:06:12 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:06:12 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: PointsTable +2016-04-08 19:06:12 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:06:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from PointsTable separated by | ] +2016-04-08 19:06:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:06:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:06:12 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:06:12 DEBUG WPS2SM:254 - Conversion to SM Type->PointsClusterLabel is a Literal Input +2016-04-08 19:06:12 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:06:12 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:06:12 DEBUG WPS2SM:101 - Guessed default value: Cluster_ +2016-04-08 19:06:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:06:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsClusterLabel +2016-04-08 19:06:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:06:12 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT] +2016-04-08 19:06:12 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_lower_bound is a Literal Input +2016-04-08 19:06:12 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:06:12 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:06:12 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 19:06:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:locality (usually called k): minimal number of nearest neighbors +2016-04-08 19:06:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_lower_bound +2016-04-08 19:06:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:06:12 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. of Entries:1; Max N. 
of Entries:1; default:2], typology=OBJECT] +2016-04-08 19:06:12 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_upper_bound is a Literal Input +2016-04-08 19:06:12 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:06:12 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:06:12 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:06:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of nearest neighbors to take into account for outliers evaluation +2016-04-08 19:06:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_upper_bound +2016-04-08 19:06:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:06:12 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:06:12 DEBUG WPS2SM:254 - Conversion to SM Type->distance_function is a Literal Input +2016-04-08 19:06:12 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:06:12 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:06:12 DEBUG WPS2SM:101 - Guessed default value: euclidian distance +2016-04-08 19:06:12 DEBUG WPS2SM:265 - ValueType[]:[euclidian distance, squared distance, cosine distance, inverted cosine distance, angle] +2016-04-08 19:06:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the distance function to use in the calculation +2016-04-08 19:06:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:distance_function +2016-04-08 19:06:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:06:12 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. 
of Entries:1; default:euclidian distance], typology=ENUM] +2016-04-08 19:06:12 DEBUG WPS2SM:254 - Conversion to SM Type->lof_threshold is a Literal Input +2016-04-08 19:06:12 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:06:12 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:06:12 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 19:06:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the LOF score threshold over which the point is an outlier (usually 2) +2016-04-08 19:06:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:lof_threshold +2016-04-08 19:06:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:06:12 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 19:06:12 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. 
of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. of Entries:1; default:euclidian distance], typology=ENUM], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 19:06:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:06:12 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 19:06:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:06:12 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:06:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:06:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:06:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:06:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:06:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:06:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:06:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:06:12 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:06:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:06:12 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:06:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:06:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:06:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:06:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:06:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:06:12 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:06:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:06:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:06:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:06:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:06:12 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 19:06:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:06:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:06:12 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:06:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:06:12 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:06:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:06:12 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:06:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:06:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:06:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:06:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:06:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:06:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:06:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:06:12 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:06:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:06:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:06:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:06:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:06:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:06:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:06:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:06:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:06:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:06:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:06:12 INFO JCRWorkspace:315 - Getting Workspace of user: 
giancarlo.panichi +2016-04-08 19:06:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:06:12 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:06:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:06:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:06:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:06:12 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:06:13 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:06:13 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:06:13 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:06:13 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:06:13 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:06:13 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:06:13 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:06:13 INFO WorkspaceExplorerServiceImpl:142 - end time - 220 msc 0 sec +2016-04-08 19:06:13 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:06:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:06:24 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:06:24 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:06:24 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS +2016-04-08 19:06:24 DEBUG SClient4WPS:263 - Describe Process WPS URL: 
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:06:24 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:06:24 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS + XMEANS + A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + + + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + + + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. 
maximum number of clusters to produce + + + + 50 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:06:24 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:06:24 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:06:24 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. 
column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:06:24 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:06:24 DEBUG SClient4WPS:290 - WPSClient->Input: + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + +2016-04-08 19:06:24 DEBUG SClient4WPS:290 - WPSClient->Input: + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + +2016-04-08 19:06:24 DEBUG SClient4WPS:290 - WPSClient->Input: + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + +2016-04-08 19:06:24 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-08 19:06:24 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:06:24 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:06:24 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:06:24 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:06:24 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:06:24 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:06:24 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:06:24 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:06:24 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:06:24 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:06:24 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:06:24 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:06:24 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-08 19:06:24 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:06:24 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:06:24 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:06:24 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:06:24 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-08 19:06:24 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:06:24 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:06:24 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:06:24 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:06:24 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:06:24 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:06:24 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:06:24 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:06:24 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:06:24 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:06:24 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:06:24 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:06:24 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:06:24 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:06:24 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:06:24 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:06:24 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:06:24 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:06:24 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:06:24 DEBUG WPS2SM:254 - Conversion to SM Type->maxIterations is a Literal Input +2016-04-08 19:06:24 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:06:24 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:06:24 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:06:24 DEBUG WPS2SM:290 - Conversion to SM Type->Title:XMeans max number of overall iterations of the clustering learning +2016-04-08 19:06:24 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxIterations +2016-04-08 19:06:24 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:06:24 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:06:24 DEBUG WPS2SM:254 - Conversion to SM Type->minClusters is a Literal Input +2016-04-08 19:06:24 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:06:24 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:06:24 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 19:06:24 DEBUG WPS2SM:290 - Conversion to SM Type->Title:minimum number of expected clusters +2016-04-08 19:06:24 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minClusters +2016-04-08 19:06:24 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:06:24 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 19:06:24 DEBUG WPS2SM:254 - Conversion to SM Type->maxClusters is a Literal Input +2016-04-08 19:06:24 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:06:24 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:06:24 DEBUG WPS2SM:101 - Guessed default value: 50 +2016-04-08 19:06:24 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of clusters to produce +2016-04-08 19:06:24 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxClusters +2016-04-08 19:06:24 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:06:24 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. 
of Entries:1; default:50], typology=OBJECT] +2016-04-08 19:06:24 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:06:24 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:06:24 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:06:24 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 19:06:24 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-08 19:06:24 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:06:24 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:06:24 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 19:06:24 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. of Entries:1; default:50], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 19:06:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:06:24 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:06:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:06:24 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:06:24 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:06:24 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:06:24 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:06:24 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:06:24 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:06:24 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:06:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:06:24 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:06:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:06:24 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:06:24 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:06:24 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:06:24 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:06:24 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:06:24 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:06:24 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:06:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:06:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:06:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:06:24 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:06:24 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:06:24 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:06:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:06:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:06:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:06:24 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:06:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:06:24 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:06:24 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:06:24 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:06:24 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:06:24 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:06:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:06:24 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 19:06:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:06:24 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:06:24 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:06:24 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:06:24 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:06:24 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:06:24 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:06:24 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:06:24 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:06:24 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:06:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:06:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:06:24 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:06:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:06:24 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 19:06:24 INFO JCRWorkspace:315 - Getting Workspace 
of user: giancarlo.panichi +2016-04-08 19:06:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:06:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:06:24 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:06:24 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:06:24 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:06:24 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:06:24 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:06:24 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:06:24 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:06:24 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:06:24 INFO WorkspaceExplorerServiceImpl:142 - end time - 190 msc 0 sec +2016-04-08 19:06:24 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:06:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:06:59 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 19:07:24 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 19:07:24 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 19:07:24 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-08 19:07:24 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 19:07:24 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 19:07:24 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:07:24 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 19:07:24 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@32d670ed +2016-04-08 19:07:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:07:24 INFO ASLSession:352 - Logging the entrance +2016-04-08 19:07:24 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 19:07:24 DEBUG TemplateModel:83 - 2016-04-08 19:07:24, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 19:07:24 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:07:24 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 19:07:28 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 19:07:28 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 19:07:28 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 19:07:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:07:28 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:07:28 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:07:28 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:07:28 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 146 ms +2016-04-08 19:07:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 19:07:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 19:07:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 19:07:28 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 19:07:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 19:07:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 19:07:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 19:07:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 19:07:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 19:07:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 19:07:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 19:07:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 19:07:28 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 19:07:28 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 19:07:28 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 19:07:28 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:07:28 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:07:28 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@1c49b38d +2016-04-08 19:07:28 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@54bdf9b7 +2016-04-08 19:07:28 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@4e7c2c74 +2016-04-08 19:07:28 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@33631465 +2016-04-08 19:07:28 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 117 ms +2016-04-08 19:07:28 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 19:07:29 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 19:07:29 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 19:07:29 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 19:07:29 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 19:07:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:07:29 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:07:29 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 19:07:29 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 30 ms +2016-04-08 19:07:29 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 19:07:29 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:07:29 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 19:07:29 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:07:29 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:07:29 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 19:07:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:07:33 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:07:33 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:07:33 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 19:07:33 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:07:33 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:07:34 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:07:34 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:07:34 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:07:34 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:07:34 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:07:34 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 19:07:34 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 19:07:34 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:07:34 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:07:34 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:07:34 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:07:34 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:07:34 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:07:34 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:07:34 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:07:34 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:07:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:07:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:07:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:07:34 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:07:34 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:07:34 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:07:34 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:07:34 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:07:34 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 19:07:34 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:07:34 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:07:34 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:07:34 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:07:34 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:07:34 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:07:34 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:07:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:07:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:07:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:07:34 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:07:34 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:07:34 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:07:34 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:07:34 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:07:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:07:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:07:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:07:34 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:07:34 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 19:07:34 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:07:34 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:07:34 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:07:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 19:07:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 19:07:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:07:34 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:07:34 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:07:34 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:07:34 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:07:34 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 19:07:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 19:07:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:07:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:07:34 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 19:07:34 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 19:07:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:07:34 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:07:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:07:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:07:34 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:07:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:07:34 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:07:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:07:34 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:07:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:07:34 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:07:34 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:07:34 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:07:34 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:07:34 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:07:34 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 19:07:34 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 19:07:34 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 19:07:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:07:34 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:07:34 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 19:07:34 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 19 ms +2016-04-08 19:07:34 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 19:07:34 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:07:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:07:34 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:07:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:07:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:07:34 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 19:07:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:07:34 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:07:34 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 19:07:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:07:34 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:07:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:07:34 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:07:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:07:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:07:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:07:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:07:34 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:07:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:07:34 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:07:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:07:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:07:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:07:35 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:07:35 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:07:35 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 19:07:35 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:07:35 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:07:35 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 19:07:35 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 19:07:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:07:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:07:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:07:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:07:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:07:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:07:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:07:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:07:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:07:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:07:35 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:07:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:07:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 19:07:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:07:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:07:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:07:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:07:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:07:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:07:35 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:07:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:07:35 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:07:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:07:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:07:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:07:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:07:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:07:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:07:35 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:07:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:07:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 
19:07:35 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:07:35 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:07:35 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 30 ms +2016-04-08 19:07:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:07:35 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:07:35 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 19:07:35 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 19:07:35 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 19:07:35 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 19:07:35 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 19:07:35 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 19:07:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:07:35 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 19:07:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:07:35 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:07:35 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:07:35 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 25 ms +2016-04-08 19:07:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:07:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:07:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:07:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:07:35 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:07:35 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:07:35 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:07:35 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 19:07:35 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:07:35 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:07:35 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:07:35 INFO WorkspaceExplorerServiceImpl:142 - end time - 438 msc 0 sec +2016-04-08 19:07:35 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:08:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:08:19 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:09:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:09:14 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:09:55 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 19:09:55 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 19:09:55 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 19:09:55 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 19:09:55 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 19:09:55 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:09:55 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 19:09:55 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@2b346470 +2016-04-08 19:09:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:09:55 INFO ASLSession:352 - Logging the entrance +2016-04-08 19:09:55 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 19:09:55 DEBUG TemplateModel:83 - 2016-04-08 19:09:55, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 19:09:55 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:09:55 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 19:09:58 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 19:09:58 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 19:09:58 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 19:09:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:09:58 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:09:58 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:09:58 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:09:59 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 133 ms +2016-04-08 
19:09:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 19:09:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 19:09:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 19:09:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 19:09:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 19:09:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 19:09:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 19:09:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 19:09:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 19:09:59 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 19:09:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 19:09:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 19:09:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 19:09:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 19:09:59 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 19:09:59 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:09:59 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 19:09:59 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@289d184b +2016-04-08 19:09:59 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@3ea391c9 +2016-04-08 19:09:59 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@73ca52ab +2016-04-08 19:09:59 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@115a1d47 +2016-04-08 19:09:59 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 116 ms +2016-04-08 19:09:59 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 19:09:59 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 19:09:59 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 19:09:59 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 19:09:59 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 19:09:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:09:59 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:09:59 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 19:09:59 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 26 ms +2016-04-08 19:09:59 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 19:09:59 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:09:59 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 19:09:59 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:10:00 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:10:00 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 19:10:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:10:03 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:10:03 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:10:03 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 19:10:03 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:10:03 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:10:03 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:10:03 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:10:03 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:10:03 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:10:03 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:10:03 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 19:10:03 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 19:10:03 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:10:03 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:10:03 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:10:03 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:10:03 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:10:03 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:10:03 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:10:03 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:10:03 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:10:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:10:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:10:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:10:03 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:10:03 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:10:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:10:03 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:10:03 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:10:03 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 19:10:03 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:10:03 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:10:03 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:10:03 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:10:03 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:10:03 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:10:03 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:10:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:10:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:10:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:10:03 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:10:03 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:10:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:10:03 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:10:03 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:10:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:10:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:10:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:10:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:10:03 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 19:10:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:10:03 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:10:03 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:10:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 19:10:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 19:10:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:10:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:10:03 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:10:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:10:03 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:10:03 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 19:10:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 19:10:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:10:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:10:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 19:10:03 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 19:10:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:10:04 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:10:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:10:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:10:04 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 19:10:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:10:04 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:10:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:10:04 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:10:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:10:04 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:10:04 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:10:04 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:10:04 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:10:04 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:10:04 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 19:10:04 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 19:10:04 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 19:10:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-08 19:10:04 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:10:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 19:10:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 17 ms +2016-04-08 19:10:04 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 19:10:04 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:10:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:10:04 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:10:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:10:04 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 19:10:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:10:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:10:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:10:04 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 19:10:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:10:04 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:10:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:10:04 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:10:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:10:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:10:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:10:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:10:04 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:10:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:10:04 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:10:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:10:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:10:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:10:04 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:10:04 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:10:04 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 19:10:04 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:10:04 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:10:04 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 19:10:04 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 19:10:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:10:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:10:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:10:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:10:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:10:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:10:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:10:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:10:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:10:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:10:04 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:10:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:10:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 19:10:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:10:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:10:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:10:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:10:04 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 19:10:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:10:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:10:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:10:04 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:10:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:10:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:10:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:10:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:10:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:10:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:10:04 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:10:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:10:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 
19:10:05 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:10:05 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:10:05 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 26 ms +2016-04-08 19:10:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:10:05 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:10:05 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 19:10:05 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 19:10:05 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 19:10:05 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 19:10:05 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 19:10:05 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 19:10:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:10:05 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 19:10:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:10:05 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:10:05 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:10:05 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 19:10:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:10:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:10:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:10:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:10:05 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:10:05 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:10:05 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:10:05 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 19:10:05 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:10:05 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:10:05 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:10:05 INFO WorkspaceExplorerServiceImpl:142 - end time - 425 msc 0 sec +2016-04-08 19:10:05 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:10:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:10:50 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:11:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:11:45 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:14:53 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 19:14:53 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 19:14:53 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 19:14:53 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 19:14:53 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 19:14:53 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:14:53 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 19:14:53 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@f0104c8 +2016-04-08 19:14:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:14:53 INFO ASLSession:352 - Logging the entrance +2016-04-08 19:14:53 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 19:14:53 DEBUG TemplateModel:83 - 2016-04-08 19:14:53, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 19:14:53 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:14:53 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 19:14:56 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 19:14:56 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 19:14:56 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 19:14:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:14:56 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:14:56 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:14:56 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:14:57 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 120 ms +2016-04-08 
19:14:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 19:14:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 19:14:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 19:14:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 19:14:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 19:14:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 19:14:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 19:14:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 19:14:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 19:14:57 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 19:14:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 19:14:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 19:14:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 19:14:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 19:14:57 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 19:14:57 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:14:57 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 19:14:57 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@54480ec1 +2016-04-08 19:14:57 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@735270ec +2016-04-08 19:14:57 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@7a96792f +2016-04-08 19:14:57 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@26d828b7 +2016-04-08 19:14:57 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 120 ms +2016-04-08 19:14:57 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 19:14:57 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 19:14:57 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 19:14:57 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 19:14:57 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 19:14:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:14:57 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:14:57 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 19:14:57 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 21 ms +2016-04-08 19:14:57 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 19:14:57 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:14:57 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 19:14:57 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:14:58 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:14:58 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 19:15:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:15:02 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:15:02 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:15:02 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 19:15:02 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:15:02 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:15:02 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:15:02 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:15:02 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:15:02 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:15:02 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:15:02 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 19:15:02 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 19:15:02 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:15:02 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:15:02 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:15:02 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:15:02 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:15:02 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:15:02 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:15:02 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:15:02 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:15:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:15:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:15:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:15:02 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:15:02 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:15:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:15:02 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:15:02 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:15:02 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 19:15:02 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:15:02 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:15:02 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:15:02 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:15:02 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:15:02 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:15:02 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:15:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:15:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:15:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:15:02 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:15:02 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:15:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:15:02 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:15:02 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:15:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:15:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:15:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:15:02 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:15:02 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 19:15:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:15:02 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:15:02 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:15:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 19:15:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 19:15:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:15:02 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:15:02 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:15:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:15:02 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:15:02 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 19:15:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 19:15:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:15:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:15:02 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 19:15:02 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 19:15:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:15:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:15:03 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:15:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:15:03 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 19:15:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:15:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:15:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:15:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:15:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:15:03 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:15:03 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:15:03 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:15:03 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:15:03 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:15:03 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 19:15:03 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 19:15:03 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 19:15:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 19:15:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:15:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 19:15:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-08 19:15:03 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 19:15:03 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:15:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:15:03 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:15:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:15:03 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 19:15:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:15:03 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 19:15:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:15:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:15:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:15:03 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 19:15:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:15:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:15:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:15:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:15:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:15:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:15:03 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:15:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:15:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:15:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:15:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:15:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:15:03 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:15:03 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:15:03 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 19:15:03 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:15:03 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:15:03 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 19:15:03 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 19:15:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:15:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:15:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:15:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:15:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:15:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:15:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:15:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:15:03 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:15:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:15:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:15:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:15:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 19:15:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:15:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:15:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:15:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:15:03 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:15:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:15:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:15:03 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 19:15:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:15:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:15:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:15:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:15:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:15:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:15:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:15:04 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:15:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 19:15:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 
19:15:04 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:15:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:15:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 30 ms +2016-04-08 19:15:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 19:15:04 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:15:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 19:15:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 19:15:04 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 19:15:04 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 19:15:04 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 19:15:04 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 19:15:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 19:15:04 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 19:15:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 19:15:04 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:15:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:15:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 20 ms +2016-04-08 19:15:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 19:15:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 19:15:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 19:15:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:15:04 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:15:04 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:15:04 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:15:04 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 19:15:04 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:15:04 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:15:04 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:15:04 INFO WorkspaceExplorerServiceImpl:142 - end time - 419 msc 0 sec +2016-04-08 19:15:04 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:15:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:15:32 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 19:15:32 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:15:32 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF +2016-04-08 19:15:32 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:15:32 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:15:32 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF + LOF + Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed. + + + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + + + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + + + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + + + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + + + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + + + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:15:32 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:15:32 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:15:32 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + +2016-04-08 19:15:32 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + +2016-04-08 19:15:32 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. 
locality (usually called k): minimal number of nearest neighbors + + + + 2 + + +2016-04-08 19:15:32 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + +2016-04-08 19:15:32 DEBUG SClient4WPS:290 - WPSClient->Input: + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + +2016-04-08 19:15:32 DEBUG SClient4WPS:290 - WPSClient->Input: + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + +2016-04-08 19:15:32 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:15:32 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:15:32 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:15:32 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:15:32 DEBUG WPS2SM:279 - Conversion to SM Type->PointsTable is a Complex Input +2016-04-08 19:15:32 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:15:32 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:15:32 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:15:32 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:15:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:15:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsTable +2016-04-08 19:15:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:15:32 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-08 19:15:32 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:15:32 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:15:32 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:15:32 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:15:32 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-08 19:15:32 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:15:32 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from PointsTable separated by | +2016-04-08 19:15:32 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:15:32 DEBUG WPS2SM:115 - Machter end: 93 +2016-04-08 19:15:32 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:15:32 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: PointsTable +2016-04-08 19:15:32 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:15:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from PointsTable separated by | ] +2016-04-08 19:15:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:15:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:15:32 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:15:32 DEBUG WPS2SM:254 - Conversion to SM Type->PointsClusterLabel is a Literal Input +2016-04-08 19:15:32 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:15:32 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:15:32 DEBUG WPS2SM:101 - Guessed default value: Cluster_ +2016-04-08 19:15:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:15:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsClusterLabel +2016-04-08 19:15:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:15:32 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT] +2016-04-08 19:15:32 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_lower_bound is a Literal Input +2016-04-08 19:15:32 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:15:32 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:15:32 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 19:15:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:locality (usually called k): minimal number of nearest neighbors +2016-04-08 19:15:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_lower_bound +2016-04-08 19:15:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:15:32 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. of Entries:1; Max N. 
of Entries:1; default:2], typology=OBJECT] +2016-04-08 19:15:32 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_upper_bound is a Literal Input +2016-04-08 19:15:32 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:15:32 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:15:32 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:15:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of nearest neighbors to take into account for outliers evaluation +2016-04-08 19:15:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_upper_bound +2016-04-08 19:15:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:15:33 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:15:33 DEBUG WPS2SM:254 - Conversion to SM Type->distance_function is a Literal Input +2016-04-08 19:15:33 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:15:33 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:15:33 DEBUG WPS2SM:101 - Guessed default value: euclidian distance +2016-04-08 19:15:33 DEBUG WPS2SM:265 - ValueType[]:[euclidian distance, squared distance, cosine distance, inverted cosine distance, angle] +2016-04-08 19:15:33 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the distance function to use in the calculation +2016-04-08 19:15:33 DEBUG WPS2SM:291 - Conversion to SM Type->Name:distance_function +2016-04-08 19:15:33 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:15:33 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. 
of Entries:1; default:euclidian distance], typology=ENUM] +2016-04-08 19:15:33 DEBUG WPS2SM:254 - Conversion to SM Type->lof_threshold is a Literal Input +2016-04-08 19:15:33 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:15:33 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:15:33 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 19:15:33 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the LOF score threshold over which the point is an outlier (usually 2) +2016-04-08 19:15:33 DEBUG WPS2SM:291 - Conversion to SM Type->Name:lof_threshold +2016-04-08 19:15:33 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:15:33 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 19:15:33 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. 
of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. of Entries:1; default:euclidian distance], typology=ENUM], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 19:15:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:15:33 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:15:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:15:33 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:15:33 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:15:33 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:15:33 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:15:33 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:15:33 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:15:33 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:15:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:15:33 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:15:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:15:33 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:15:33 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:15:33 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:15:33 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:15:33 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:15:33 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:15:33 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:15:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:15:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:15:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:15:33 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:15:33 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:15:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:15:33 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 19:15:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:15:33 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:15:33 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:15:33 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:15:33 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:15:33 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:15:33 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:15:33 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:15:33 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:15:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:15:33 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:15:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:15:33 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:15:33 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:15:33 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:15:33 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:15:33 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:15:33 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:15:33 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:15:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:15:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:15:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:15:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by 
giancarlo.panichi +2016-04-08 19:15:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:15:33 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:15:33 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:15:33 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:15:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:15:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:15:33 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:15:33 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:15:33 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:15:33 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:15:33 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:15:33 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:15:33 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:15:33 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:15:33 INFO WorkspaceExplorerServiceImpl:142 - end time - 197 msc 0 sec +2016-04-08 19:15:33 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:15:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:15:48 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 19:16:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:16:17 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 19:16:17 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 
19:16:17 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 19:16:17 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:16:17 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:16:17 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. 
DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:16:17 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:16:17 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:16:17 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:16:17 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. 
table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:16:17 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + +2016-04-08 19:16:17 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 19:16:17 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:16:17 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:16:17 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:16:17 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:16:17 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:16:17 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:16:17 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:16:17 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:16:17 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:16:17 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:16:17 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:16:17 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:17 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:16:17 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:16:17 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:17 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:16:17 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:16:17 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 19:16:17 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:16:17 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:16:17 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:16:17 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:16:17 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:16:17 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:16:17 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:16:17 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:16:17 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:16:17 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:17 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:16:17 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:16:17 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:17 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:16:17 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:16:17 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:16:17 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:16:17 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:17 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:16:17 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 19:16:17 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:17 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:16:17 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:16:17 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 19:16:17 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 19:16:17 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:17 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:16:17 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:16:17 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:17 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:16:17 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 19:16:17 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 19:16:17 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:16:17 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:17 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 19:16:17 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 19:16:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:16:18 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 19:16:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:16:18 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:16:18 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:16:18 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:16:18 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:16:18 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:16:18 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:16:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:16:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:16:18 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:16:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:16:18 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:16:18 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:16:18 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:16:18 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:16:18 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:16:18 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:16:18 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:16:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:16:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:16:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:16:18 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 19:16:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:16:18 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:16:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:16:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:16:18 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:16:18 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:16:18 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:16:18 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:16:18 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:16:18 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:16:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:16:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:16:18 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:16:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:16:18 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:16:18 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:16:18 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:16:18 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:16:18 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:16:18 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:16:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:16:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:16:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:16:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:16:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:16:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by 
giancarlo.panichi +2016-04-08 19:16:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:16:18 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:16:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:16:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:16:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:16:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:16:18 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:16:18 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:16:18 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:16:18 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:16:18 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:16:18 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:16:18 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:16:18 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:16:18 INFO WorkspaceExplorerServiceImpl:142 - end time - 184 msc 0 sec +2016-04-08 19:16:18 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:16:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:16:26 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:16:26 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:16:26 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS +2016-04-08 19:16:26 DEBUG 
SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:16:26 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:16:26 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS + XMEANS + A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + + + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + + + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. 
maximum number of clusters to produce + + + + 50 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:16:26 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:16:26 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:16:26 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. 
column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:16:26 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:16:26 DEBUG SClient4WPS:290 - WPSClient->Input: + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + +2016-04-08 19:16:26 DEBUG SClient4WPS:290 - WPSClient->Input: + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + +2016-04-08 19:16:26 DEBUG SClient4WPS:290 - WPSClient->Input: + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + +2016-04-08 19:16:26 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-08 19:16:26 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:16:26 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:16:26 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:16:26 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:16:26 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:16:26 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:16:26 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:16:26 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:16:26 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:16:26 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:16:26 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:16:26 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:26 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-08 19:16:26 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:16:26 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:26 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:16:26 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:16:26 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-08 19:16:26 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:16:26 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:16:26 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:16:26 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:16:26 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:16:26 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:16:26 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:16:26 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:16:26 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:16:26 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:26 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:16:26 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:16:26 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:26 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:16:26 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:16:26 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:16:26 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:16:26 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:26 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:16:26 DEBUG WPS2SM:254 - Conversion to SM Type->maxIterations is a Literal Input +2016-04-08 19:16:26 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:26 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:16:26 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:16:26 DEBUG WPS2SM:290 - Conversion to SM Type->Title:XMeans max number of overall iterations of the clustering learning +2016-04-08 19:16:26 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxIterations +2016-04-08 19:16:26 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:26 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:16:26 DEBUG WPS2SM:254 - Conversion to SM Type->minClusters is a Literal Input +2016-04-08 19:16:26 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:26 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:16:26 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 19:16:26 DEBUG WPS2SM:290 - Conversion to SM Type->Title:minimum number of expected clusters +2016-04-08 19:16:26 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minClusters +2016-04-08 19:16:26 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:26 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 19:16:26 DEBUG WPS2SM:254 - Conversion to SM Type->maxClusters is a Literal Input +2016-04-08 19:16:26 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:26 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:16:26 DEBUG WPS2SM:101 - Guessed default value: 50 +2016-04-08 19:16:26 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of clusters to produce +2016-04-08 19:16:26 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxClusters +2016-04-08 19:16:26 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:26 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. 
of Entries:1; default:50], typology=OBJECT] +2016-04-08 19:16:26 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:16:26 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:26 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:16:26 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 19:16:26 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-08 19:16:26 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:16:26 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:26 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 19:16:26 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. of Entries:1; default:50], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 19:16:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:16:26 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:16:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:16:26 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:16:26 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:16:26 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:16:26 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:16:26 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:16:26 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:16:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:16:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:16:26 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 19:16:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:16:26 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:16:26 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:16:26 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:16:26 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:16:26 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:16:26 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:16:26 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:16:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:16:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:16:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:16:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:16:26 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:16:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:16:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:16:26 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:16:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:16:26 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:16:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:16:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:16:26 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:16:26 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:16:26 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:16:26 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:16:26 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:16:26 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:16:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:16:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:16:26 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:16:26 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:16:26 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:16:26 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:16:26 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:16:26 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:16:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:16:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:16:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:16:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:16:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:16:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:16:26 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:16:26 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:16:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:16:26 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:16:26 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:16:26 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:16:26 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:16:26 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:16:26 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 
19:16:26 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:16:26 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:16:26 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:16:26 INFO WorkspaceExplorerServiceImpl:142 - end time - 168 msc 0 sec +2016-04-08 19:16:26 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:16:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:16:39 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:16:39 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:16:39 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY +2016-04-08 19:16:39 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:16:39 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:16:39 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY + CMSY + An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner. + + + IDsFile + Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK + Name of the parameter: IDsFile. Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK + + + + + + + StocksFile + Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. Example: http://goo.gl/Mp2ZLY + Name of the parameter: StocksFile. 
Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. Example: http://goo.gl/Mp2ZLY + + + + + + + SelectedStock + The stock on which the procedure has to focus e.g. HLH_M07 + Name of the parameter: SelectedStock. The stock on which the procedure has to focus e.g. HLH_M07 + + + + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:16:39 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:16:39 DEBUG SClient4WPS:290 - WPSClient->Input: + IDsFile + Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK + Name of the parameter: IDsFile. Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK + + + + + +2016-04-08 19:16:39 DEBUG SClient4WPS:290 - WPSClient->Input: + StocksFile + Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. Example: http://goo.gl/Mp2ZLY + Name of the parameter: StocksFile. Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. Example: http://goo.gl/Mp2ZLY + + + + + +2016-04-08 19:16:39 DEBUG SClient4WPS:290 - WPSClient->Input: + SelectedStock + The stock on which the procedure has to focus e.g. HLH_M07 + Name of the parameter: SelectedStock. The stock on which the procedure has to focus e.g. 
HLH_M07 + + + + + +2016-04-08 19:16:39 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:16:39 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:16:39 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:16:39 DEBUG WPS2SM:254 - Conversion to SM Type->IDsFile is a Literal Input +2016-04-08 19:16:39 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:39 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:16:39 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:16:39 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK +2016-04-08 19:16:39 DEBUG WPS2SM:291 - Conversion to SM Type->Name:IDsFile +2016-04-08 19:16:39 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:39 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue= , value=null, name=IDsFile, description=Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK [Min N. of Entries:1; Max N. of Entries:1], typology=OBJECT] +2016-04-08 19:16:39 DEBUG WPS2SM:254 - Conversion to SM Type->StocksFile is a Literal Input +2016-04-08 19:16:39 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:39 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:16:39 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:16:39 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. 
Example: http://goo.gl/Mp2ZLY +2016-04-08 19:16:39 DEBUG WPS2SM:291 - Conversion to SM Type->Name:StocksFile +2016-04-08 19:16:39 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:39 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue= , value=null, name=StocksFile, description=Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. Example: http://goo.gl/Mp2ZLY [Min N. of Entries:1; Max N. of Entries:1], typology=OBJECT] +2016-04-08 19:16:39 DEBUG WPS2SM:254 - Conversion to SM Type->SelectedStock is a Literal Input +2016-04-08 19:16:39 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:39 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:16:39 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:16:39 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The stock on which the procedure has to focus e.g. HLH_M07 +2016-04-08 19:16:39 DEBUG WPS2SM:291 - Conversion to SM Type->Name:SelectedStock +2016-04-08 19:16:39 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:39 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue= , value=null, name=SelectedStock, description=The stock on which the procedure has to focus e.g. HLH_M07 [Min N. of Entries:1; Max N. of Entries:1], typology=OBJECT] +2016-04-08 19:16:39 DEBUG SClient4WPS:649 - Parameters: [ObjectParameter [type=java.lang.String, defaultValue= , value=null, name=IDsFile, description=Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK [Min N. of Entries:1; Max N. of Entries:1], typology=OBJECT], ObjectParameter [type=java.lang.String, defaultValue= , value=null, name=StocksFile, description=Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. Example: http://goo.gl/Mp2ZLY [Min N. of Entries:1; Max N. 
of Entries:1], typology=OBJECT], ObjectParameter [type=java.lang.String, defaultValue= , value=null, name=SelectedStock, description=The stock on which the procedure has to focus e.g. HLH_M07 [Min N. of Entries:1; Max N. of Entries:1], typology=OBJECT]] +2016-04-08 19:16:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:16:43 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 19:16:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:16:47 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 19:16:47 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:16:47 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING +2016-04-08 19:16:47 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:16:47 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:16:48 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING + MAX_ENT_NICHE_MODELLING + A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt + + + OutputTableLabel + The name of the table to produce + Name of the parameter: OutputTableLabel. The name of the table to produce + + + + maxent_ + + + + SpeciesName + The name of the species to model and the occurrence records refer to + Name of the parameter: SpeciesName. The name of the species to model and the occurrence records refer to + + + + generic_species + + + + MaxIterations + The number of learning iterations of the MaxEnt algorithm + Name of the parameter: MaxIterations. The number of learning iterations of the MaxEnt algorithm + + + + 1000 + + + + DefaultPrevalence + A priori probability of presence at ordinary occurrence points + Name of the parameter: DefaultPrevalence. A priori probability of presence at ordinary occurrence points + + + + 0.5 + + + + OccurrencesTable + A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + Name of the parameter: OccurrencesTable. 
A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + LongitudeColumn + The column containing longitude values [the name of a column from OccurrencesTable] + Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrencesTable] + + + + decimallongitude + + + + LatitudeColumn + The column containing latitude values [the name of a column from OccurrencesTable] + Name of the parameter: LatitudeColumn. The column containing latitude values [the name of a column from OccurrencesTable] + + + + decimallatitude + + + + XResolution + Model projection resolution on the X axis in decimal degrees + Name of the parameter: XResolution. Model projection resolution on the X axis in decimal degrees + + + + 1 + + + + YResolution + Model projection resolution on the Y axis in decimal degrees + Name of the parameter: YResolution. Model projection resolution on the Y axis in decimal degrees + + + + 1 + + + + Layers + The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. 
the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + + + + + + + Z + Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + Name of the parameter: Z. Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + + + + 0 + + + + TimeIndex + Time Index. The default is the first time indexed in the input environmental datasets + Name of the parameter: TimeIndex. Time Index. The default is the first time indexed in the input environmental datasets + + + + 0 + + + + + + Best Threshold + Best threshold for transforming MaxEnt values into 0/1 probability assignments + Name of the parameter: Best Threshold. Best threshold for transforming MaxEnt values into 0/1 probability assignments + + + + + + Estimated Prevalence + The a posteriori estimated prevalence of the species + Name of the parameter: Estimated Prevalence. The a posteriori estimated prevalence of the species + + + + + + Variables contributions + The contribution of each variable to the MaxEnt values estimates + Name of the parameter: Variables contributions. The contribution of each variable to the MaxEnt values estimates + + + + + + Variables Permutations Importance + The importance of the permutations of the variables during the training + Name of the parameter: Variables Permutations Importance. 
The importance of the permutations of the variables during the training + + + + + + ASCII Maps of the environmental layers for checking features aligments + ASCII Maps of the environmental layers for checking features aligments + Name of the parameter: ASCII Maps of the environmental layers for checking features aligments. ASCII Maps of the environmental layers for checking features aligments + + + + application/d4science + + + + + application/d4science + + + + + + OutputTable7 + Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OutputTable7. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:16:48 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:16:48 DEBUG SClient4WPS:290 - WPSClient->Input: + OutputTableLabel + The name of the table to produce + Name of the parameter: OutputTableLabel. The name of the table to produce + + + + maxent_ + + +2016-04-08 19:16:48 DEBUG SClient4WPS:290 - WPSClient->Input: + SpeciesName + The name of the species to model and the occurrence records refer to + Name of the parameter: SpeciesName. The name of the species to model and the occurrence records refer to + + + + generic_species + + +2016-04-08 19:16:48 DEBUG SClient4WPS:290 - WPSClient->Input: + MaxIterations + The number of learning iterations of the MaxEnt algorithm + Name of the parameter: MaxIterations. 
The number of learning iterations of the MaxEnt algorithm + + + + 1000 + + +2016-04-08 19:16:48 DEBUG SClient4WPS:290 - WPSClient->Input: + DefaultPrevalence + A priori probability of presence at ordinary occurrence points + Name of the parameter: DefaultPrevalence. A priori probability of presence at ordinary occurrence points + + + + 0.5 + + +2016-04-08 19:16:48 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencesTable + A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + Name of the parameter: OccurrencesTable. A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:16:48 DEBUG SClient4WPS:290 - WPSClient->Input: + LongitudeColumn + The column containing longitude values [the name of a column from OccurrencesTable] + Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrencesTable] + + + + decimallongitude + + +2016-04-08 19:16:48 DEBUG SClient4WPS:290 - WPSClient->Input: + LatitudeColumn + The column containing latitude values [the name of a column from OccurrencesTable] + Name of the parameter: LatitudeColumn. The column containing latitude values [the name of a column from OccurrencesTable] + + + + decimallatitude + + +2016-04-08 19:16:48 DEBUG SClient4WPS:290 - WPSClient->Input: + XResolution + Model projection resolution on the X axis in decimal degrees + Name of the parameter: XResolution. 
Model projection resolution on the X axis in decimal degrees + + + + 1 + + +2016-04-08 19:16:48 DEBUG SClient4WPS:290 - WPSClient->Input: + YResolution + Model projection resolution on the Y axis in decimal degrees + Name of the parameter: YResolution. Model projection resolution on the Y axis in decimal degrees + + + + 1 + + +2016-04-08 19:16:48 DEBUG SClient4WPS:290 - WPSClient->Input: + Layers + The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + + + + + +2016-04-08 19:16:48 DEBUG SClient4WPS:290 - WPSClient->Input: + Z + Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + Name of the parameter: Z. Value of Z. 
Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + + + + 0 + + +2016-04-08 19:16:48 DEBUG SClient4WPS:290 - WPSClient->Input: + TimeIndex + Time Index. The default is the first time indexed in the input environmental datasets + Name of the parameter: TimeIndex. Time Index. The default is the first time indexed in the input environmental datasets + + + + 0 + + +2016-04-08 19:16:48 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:16:48 DEBUG SClient4WPS:297 - WPSClient->Output: + Best Threshold + Best threshold for transforming MaxEnt values into 0/1 probability assignments + Name of the parameter: Best Threshold. Best threshold for transforming MaxEnt values into 0/1 probability assignments + + + + +2016-04-08 19:16:48 DEBUG SClient4WPS:297 - WPSClient->Output: + Estimated Prevalence + The a posteriori estimated prevalence of the species + Name of the parameter: Estimated Prevalence. The a posteriori estimated prevalence of the species + + + + +2016-04-08 19:16:48 DEBUG SClient4WPS:297 - WPSClient->Output: + Variables contributions + The contribution of each variable to the MaxEnt values estimates + Name of the parameter: Variables contributions. The contribution of each variable to the MaxEnt values estimates + + + + +2016-04-08 19:16:48 DEBUG SClient4WPS:297 - WPSClient->Output: + Variables Permutations Importance + The importance of the permutations of the variables during the training + Name of the parameter: Variables Permutations Importance. The importance of the permutations of the variables during the training + + + + +2016-04-08 19:16:48 DEBUG SClient4WPS:297 - WPSClient->Output: + ASCII Maps of the environmental layers for checking features aligments + ASCII Maps of the environmental layers for checking features aligments + Name of the parameter: ASCII Maps of the environmental layers for checking features aligments. 
ASCII Maps of the environmental layers for checking features aligments + + + + application/d4science + + + + + application/d4science + + + + +2016-04-08 19:16:48 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable7 + Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OutputTable7. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:16:48 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:16:48 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:16:48 DEBUG WPS2SM:254 - Conversion to SM Type->OutputTableLabel is a Literal Input +2016-04-08 19:16:48 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:48 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:16:48 DEBUG WPS2SM:101 - Guessed default value: maxent_ +2016-04-08 19:16:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The name of the table to produce +2016-04-08 19:16:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OutputTableLabel +2016-04-08 19:16:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=maxent_, value=null, name=OutputTableLabel, description=The name of the table to produce [Min N. of Entries:1; Max N. 
of Entries:1; default:maxent_], typology=OBJECT] +2016-04-08 19:16:48 DEBUG WPS2SM:254 - Conversion to SM Type->SpeciesName is a Literal Input +2016-04-08 19:16:48 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:48 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:16:48 DEBUG WPS2SM:101 - Guessed default value: generic_species +2016-04-08 19:16:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The name of the species to model and the occurrence records refer to +2016-04-08 19:16:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:SpeciesName +2016-04-08 19:16:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=generic_species, value=null, name=SpeciesName, description=The name of the species to model and the occurrence records refer to [Min N. of Entries:1; Max N. of Entries:1; default:generic_species], typology=OBJECT] +2016-04-08 19:16:48 DEBUG WPS2SM:254 - Conversion to SM Type->MaxIterations is a Literal Input +2016-04-08 19:16:48 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:48 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:16:48 DEBUG WPS2SM:101 - Guessed default value: 1000 +2016-04-08 19:16:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The number of learning iterations of the MaxEnt algorithm +2016-04-08 19:16:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:MaxIterations +2016-04-08 19:16:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1000, value=null, name=MaxIterations, description=The number of learning iterations of the MaxEnt algorithm [Min N. of Entries:1; Max N. 
of Entries:1; default:1000], typology=OBJECT] +2016-04-08 19:16:48 DEBUG WPS2SM:254 - Conversion to SM Type->DefaultPrevalence is a Literal Input +2016-04-08 19:16:48 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:48 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-08 19:16:48 DEBUG WPS2SM:101 - Guessed default value: 0.5 +2016-04-08 19:16:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:A priori probability of presence at ordinary occurrence points +2016-04-08 19:16:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:DefaultPrevalence +2016-04-08 19:16:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=0.5, value=null, name=DefaultPrevalence, description=A priori probability of presence at ordinary occurrence points [Min N. of Entries:1; Max N. of Entries:1; default:0.5], typology=OBJECT] +2016-04-08 19:16:48 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencesTable is a Complex Input +2016-04-08 19:16:48 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:16:48 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:16:48 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:16:48 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:16:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] +2016-04-08 19:16:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencesTable +2016-04-08 19:16:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:48 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencesTable, description=A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets 
[a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:16:48 DEBUG WPS2SM:254 - Conversion to SM Type->LongitudeColumn is a Literal Input +2016-04-08 19:16:48 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:48 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:16:48 DEBUG WPS2SM:101 - Guessed default value: decimallongitude +2016-04-08 19:16:48 DEBUG WPS2SM:130 - Machter title: The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallongitude] +2016-04-08 19:16:48 DEBUG WPS2SM:131 - Machter find: true +2016-04-08 19:16:48 DEBUG WPS2SM:132 - Machter group: the name of a column from OccurrencesTable +2016-04-08 19:16:48 DEBUG WPS2SM:133 - Machter start: 40 +2016-04-08 19:16:48 DEBUG WPS2SM:134 - Machter end: 82 +2016-04-08 19:16:48 DEBUG WPS2SM:135 - Machter Group Count: 1 +2016-04-08 19:16:48 DEBUG WPS2SM:137 - Matcher referredTabularParameterName: OccurrencesTable +2016-04-08 19:16:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The column containing longitude values [the name of a column from OccurrencesTable] +2016-04-08 19:16:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:LongitudeColumn +2016-04-08 19:16:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:48 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=LongitudeColumn, description=The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallongitude], typology=COLUMN] +2016-04-08 19:16:48 DEBUG WPS2SM:254 - Conversion to SM Type->LatitudeColumn is a Literal Input +2016-04-08 19:16:48 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:48 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:16:48 DEBUG WPS2SM:101 - Guessed default value: decimallatitude +2016-04-08 19:16:48 DEBUG WPS2SM:130 - Machter title: The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallatitude] +2016-04-08 19:16:48 DEBUG WPS2SM:131 - Machter find: true +2016-04-08 19:16:48 DEBUG WPS2SM:132 - Machter group: the name of a column from OccurrencesTable +2016-04-08 19:16:48 DEBUG WPS2SM:133 - Machter start: 39 +2016-04-08 19:16:48 DEBUG WPS2SM:134 - Machter end: 81 +2016-04-08 19:16:48 DEBUG WPS2SM:135 - Machter Group Count: 1 +2016-04-08 19:16:48 DEBUG WPS2SM:137 - Matcher referredTabularParameterName: OccurrencesTable +2016-04-08 19:16:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The column containing latitude values [the name of a column from OccurrencesTable] +2016-04-08 19:16:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:LatitudeColumn +2016-04-08 19:16:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:48 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=LatitudeColumn, description=The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallatitude], typology=COLUMN] +2016-04-08 19:16:48 DEBUG WPS2SM:254 - Conversion to SM Type->XResolution is a Literal Input +2016-04-08 19:16:48 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:48 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-08 19:16:48 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 19:16:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Model projection resolution on the X axis in decimal degrees +2016-04-08 19:16:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:XResolution +2016-04-08 19:16:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=XResolution, description=Model projection resolution on the X axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 19:16:48 DEBUG WPS2SM:254 - Conversion to SM Type->YResolution is a Literal Input +2016-04-08 19:16:48 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:48 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-08 19:16:48 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 19:16:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Model projection resolution on the Y axis in decimal degrees +2016-04-08 19:16:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:YResolution +2016-04-08 19:16:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=YResolution, description=Model projection resolution on the Y axis in decimal degrees [Min N. of Entries:1; Max N. 
of Entries:1; default:1], typology=OBJECT] +2016-04-08 19:16:48 DEBUG WPS2SM:254 - Conversion to SM Type->Layers is a Literal Input +2016-04-08 19:16:48 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:48 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:16:48 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:16:48 DEBUG WPS2SM:147 - Machter title: The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1] +2016-04-08 19:16:48 DEBUG WPS2SM:148 - Machter find: true +2016-04-08 19:16:48 DEBUG WPS2SM:149 - Machter group: a sequence of values separated by | +2016-04-08 19:16:48 DEBUG WPS2SM:150 - Machter start: 501 +2016-04-08 19:16:48 DEBUG WPS2SM:151 - Machter end: 536 +2016-04-08 19:16:48 DEBUG WPS2SM:152 - Machter Group Count: 1 +2016-04-08 19:16:48 DEBUG WPS2SM:155 - Matcher separator: | +2016-04-08 19:16:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. 
https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) +2016-04-08 19:16:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Layers +2016-04-08 19:16:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:48 DEBUG SClient4WPS:645 - InputParameter: ListParameter [type=java.lang.String, value=null, separator=|, name=Layers, description=The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1], typology=LIST] +2016-04-08 19:16:48 DEBUG WPS2SM:254 - Conversion to SM Type->Z is a Literal Input +2016-04-08 19:16:48 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:48 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-08 19:16:48 DEBUG WPS2SM:101 - Guessed default value: 0 +2016-04-08 19:16:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer +2016-04-08 19:16:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Z +2016-04-08 19:16:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=0, value=null, name=Z, description=Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer [Min N. 
of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT] +2016-04-08 19:16:48 DEBUG WPS2SM:254 - Conversion to SM Type->TimeIndex is a Literal Input +2016-04-08 19:16:48 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:16:48 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:16:48 DEBUG WPS2SM:101 - Guessed default value: 0 +2016-04-08 19:16:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Time Index. The default is the first time indexed in the input environmental datasets +2016-04-08 19:16:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:TimeIndex +2016-04-08 19:16:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:16:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex, description=Time Index. The default is the first time indexed in the input environmental datasets [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT] +2016-04-08 19:16:48 DEBUG SClient4WPS:649 - Parameters: [ObjectParameter [type=java.lang.String, defaultValue=maxent_, value=null, name=OutputTableLabel, description=The name of the table to produce [Min N. of Entries:1; Max N. of Entries:1; default:maxent_], typology=OBJECT], ObjectParameter [type=java.lang.String, defaultValue=generic_species, value=null, name=SpeciesName, description=The name of the species to model and the occurrence records refer to [Min N. of Entries:1; Max N. of Entries:1; default:generic_species], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1000, value=null, name=MaxIterations, description=The number of learning iterations of the MaxEnt algorithm [Min N. of Entries:1; Max N. of Entries:1; default:1000], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=0.5, value=null, name=DefaultPrevalence, description=A priori probability of presence at ordinary occurrence points [Min N. of Entries:1; Max N. 
of Entries:1; default:0.5], typology=OBJECT], TabularParameter [tableName= , templates=[], name=OccurrencesTable, description=A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=LongitudeColumn, description=The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallongitude], typology=COLUMN], Parameter [name=LatitudeColumn, description=The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallatitude], typology=COLUMN], ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=XResolution, description=Model projection resolution on the X axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=YResolution, description=Model projection resolution on the Y axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ListParameter [type=java.lang.String, value=null, separator=|, name=Layers, description=The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. 
of Entries:1], typology=LIST], ObjectParameter [type=java.lang.Double, defaultValue=0, value=null, name=Z, description=Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex, description=Time Index. The default is the first time indexed in the input environmental datasets [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT]] +2016-04-08 19:16:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:16:48 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:16:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:16:48 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:16:48 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:16:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:16:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:16:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:16:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:16:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:16:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:16:48 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:16:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:16:48 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:16:48 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:16:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:16:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:16:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:16:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:16:48 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:16:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:16:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:16:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:16:48 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:16:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:16:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:16:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:16:48 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 19:16:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:16:48 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:16:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:16:48 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:16:48 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:16:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:16:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:16:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:16:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:16:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:16:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:16:48 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:16:48 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:16:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:16:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:16:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:16:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:16:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:16:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:16:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:16:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:16:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:16:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:16:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:16:48 DEBUG ASLSession:458 - Getting security token: null in 
thread 35 +2016-04-08 19:16:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:16:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:16:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:16:48 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:16:48 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:16:48 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:16:48 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:16:48 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:16:48 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:16:48 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:16:48 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:16:48 INFO WorkspaceExplorerServiceImpl:142 - end time - 196 msc 0 sec +2016-04-08 19:16:48 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:17:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:17:38 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 19:18:10 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 19:18:10 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 19:18:10 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-08 19:18:10 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 19:18:10 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 19:18:10 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:18:10 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 19:18:10 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@75d060e0 +2016-04-08 19:18:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:18:10 INFO ASLSession:352 - Logging the entrance +2016-04-08 19:18:10 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 19:18:10 DEBUG TemplateModel:83 - 2016-04-08 19:18:10, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 19:18:10 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:18:10 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 19:18:14 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 19:18:14 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 19:18:14 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 19:18:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:18:14 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:18:14 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:18:14 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:18:14 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 129 ms +2016-04-08 19:18:14 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 19:18:14 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 19:18:14 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 19:18:14 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 19:18:14 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 19:18:14 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 19:18:14 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 19:18:14 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 19:18:14 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 19:18:14 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 19:18:14 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 19:18:14 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 19:18:14 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 19:18:14 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 19:18:14 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 19:18:14 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:18:14 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:18:14 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@61666b97 +2016-04-08 19:18:14 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@5e42bef1 +2016-04-08 19:18:14 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@2773cb64 +2016-04-08 19:18:14 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@19f1f1db +2016-04-08 19:18:14 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 95 ms +2016-04-08 19:18:14 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 19:18:14 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 19:18:14 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 19:18:14 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 19:18:14 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 19:18:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:18:14 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:18:14 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 19:18:15 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-08 19:18:15 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 19:18:15 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:18:15 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 19:18:15 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:18:15 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:18:15 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 19:18:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:18:18 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:18:18 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:18:18 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 19:18:18 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:18:18 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:18:18 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:18:18 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:18:18 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:18:18 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:18:18 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:18:18 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 19:18:18 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 19:18:18 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:18:18 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:18:18 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:18:18 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:18:18 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:18:18 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:18:18 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:18:18 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:18:18 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:18:18 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:18:18 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:18:18 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:18:18 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:18:18 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:18:18 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:18:18 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:18:18 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:18:18 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 19:18:18 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:18:18 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:18:18 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:18:18 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:18:18 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:18:18 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:18:18 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:18:18 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:18:18 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:18:18 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:18:18 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:18:18 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:18:18 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:18:18 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:18:18 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:18:18 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:18:18 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:18:18 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:18:18 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:18:18 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 19:18:18 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:18:18 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:18:18 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:18:18 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 19:18:18 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 19:18:18 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:18:18 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:18:18 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:18:18 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:18:18 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:18:18 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 19:18:18 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 19:18:18 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:18:18 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:18:18 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 19:18:18 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 19:18:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:18:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:18:19 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 19:18:19 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:18:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:18:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:18:19 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:18:19 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:18:19 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:18:19 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:18:19 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:18:19 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:18:19 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:18:19 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 19:18:19 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:18:19 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:18:19 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 19:18:19 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 19:18:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 19:18:19 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:18:19 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 19:18:19 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-08 19:18:19 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 19:18:19 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:18:19 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:18:19 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:18:19 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:18:19 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:18:19 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 19:18:19 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:18:19 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:18:19 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 19:18:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:18:19 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:18:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:18:19 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:18:19 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:18:19 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:18:19 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:18:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:18:19 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:18:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:18:19 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:18:19 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:18:19 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:18:19 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:18:19 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:18:19 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:18:19 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 19:18:19 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:18:19 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:18:19 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 19:18:19 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 19:18:19 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:18:19 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:18:19 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:18:19 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:18:19 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:18:19 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:18:19 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:18:19 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:18:19 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:18:19 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:18:19 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:18:19 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:18:19 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by 
giancarlo.panichi +2016-04-08 19:18:19 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:18:19 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:18:19 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:18:19 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:18:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:18:19 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:18:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:18:19 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:18:19 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:18:19 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:18:19 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:18:19 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:18:19 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:18:19 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:18:19 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:18:19 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:18:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 19:18:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 
19:18:20 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:18:20 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:18:20 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 21 ms +2016-04-08 19:18:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 19:18:20 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:18:20 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 19:18:20 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 19:18:20 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 19:18:20 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 19:18:20 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 19:18:20 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 19:18:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 19:18:20 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 19:18:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 19:18:20 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:18:20 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:18:20 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 19:18:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 19:18:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 19:18:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 19:18:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:18:20 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:18:20 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:18:20 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:18:20 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 19:18:20 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:18:20 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:18:20 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:18:20 INFO WorkspaceExplorerServiceImpl:142 - end time - 398 msc 0 sec +2016-04-08 19:18:20 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:18:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:18:27 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:18:27 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:18:27 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-08 19:18:27 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:18:27 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:18:27 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:18:27 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:18:27 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:18:27 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:18:27 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:18:27 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-08 19:18:27 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-08 19:18:27 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-08 19:18:27 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-08 19:18:27 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:18:27 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:18:27 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:18:27 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:18:27 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:18:27 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:18:27 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:18:27 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:18:27 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:18:27 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:18:27 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:18:27 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:18:27 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:18:27 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:18:27 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:18:27 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:18:27 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:18:27 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 19:18:27 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:18:27 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:18:27 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:18:27 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:18:27 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:18:27 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:18:27 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:18:27 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:18:27 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:18:27 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:18:27 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:18:27 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:18:27 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:18:27 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:18:27 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:18:27 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:18:27 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:18:27 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:18:27 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:18:27 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-08 19:18:27 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:18:27 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:18:27 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-08 19:18:27 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-08 19:18:27 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-08 19:18:27 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:18:27 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-08 19:18:27 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-08 19:18:27 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:18:27 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:18:27 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:18:27 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-08 19:18:27 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-08 19:18:27 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:18:27 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:18:27 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-08 19:18:27 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:18:27 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:18:27 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-08 19:18:27 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-08 19:18:27 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-08 19:18:27 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:18:27 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-08 19:18:27 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:18:27 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:18:27 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:18:27 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 19:18:27 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-08 19:18:27 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:18:27 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:18:27 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 19:18:27 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 19:18:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:18:27 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 19:18:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:18:27 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:18:27 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:18:27 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:18:27 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:18:27 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:18:27 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:18:27 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:18:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:18:28 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:18:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:18:28 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:18:28 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:18:28 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:18:28 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:18:28 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:18:28 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:18:28 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:18:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:18:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:18:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:18:28 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:18:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:18:28 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:18:28 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:18:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:18:28 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 19:18:28 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:18:28 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:18:28 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:18:28 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:18:28 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:18:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:18:28 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:18:28 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:18:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:18:28 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:18:28 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:18:28 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:18:28 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:18:28 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:18:28 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:18:28 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:18:28 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:18:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:18:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:18:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:18:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:18:28 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:18:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:18:28 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:18:28 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:18:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:18:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:18:28 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:18:28 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:18:28 DEBUG 
ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:18:28 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:18:28 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:18:28 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:18:28 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:18:28 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:18:28 INFO WorkspaceExplorerServiceImpl:142 - end time - 212 msc 0 sec +2016-04-08 19:18:28 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:18:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:18:42 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:18:42 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:18:42 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF +2016-04-08 19:18:42 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:18:42 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:18:42 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF + LOF + Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed. + + + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. 
Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + + + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + + + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + + + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + + + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + + + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:18:42 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:18:42 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:18:42 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + +2016-04-08 19:18:42 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + +2016-04-08 19:18:42 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. 
locality (usually called k): minimal number of nearest neighbors + + + + 2 + + +2016-04-08 19:18:42 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + +2016-04-08 19:18:42 DEBUG SClient4WPS:290 - WPSClient->Input: + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + +2016-04-08 19:18:42 DEBUG SClient4WPS:290 - WPSClient->Input: + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + +2016-04-08 19:18:42 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:18:42 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:18:42 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:18:42 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:18:42 DEBUG WPS2SM:279 - Conversion to SM Type->PointsTable is a Complex Input +2016-04-08 19:18:42 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:18:42 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:18:42 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:18:42 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:18:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:18:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsTable +2016-04-08 19:18:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:18:42 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-08 19:18:42 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:18:42 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:18:42 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:18:42 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:18:42 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-08 19:18:42 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:18:42 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from PointsTable separated by | +2016-04-08 19:18:42 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:18:42 DEBUG WPS2SM:115 - Machter end: 93 +2016-04-08 19:18:42 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:18:42 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: PointsTable +2016-04-08 19:18:42 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:18:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from PointsTable separated by | ] +2016-04-08 19:18:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:18:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:18:42 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:18:42 DEBUG WPS2SM:254 - Conversion to SM Type->PointsClusterLabel is a Literal Input +2016-04-08 19:18:42 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:18:42 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:18:42 DEBUG WPS2SM:101 - Guessed default value: Cluster_ +2016-04-08 19:18:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:18:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsClusterLabel +2016-04-08 19:18:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:18:42 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT] +2016-04-08 19:18:42 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_lower_bound is a Literal Input +2016-04-08 19:18:42 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:18:42 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:18:42 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 19:18:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:locality (usually called k): minimal number of nearest neighbors +2016-04-08 19:18:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_lower_bound +2016-04-08 19:18:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:18:42 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. of Entries:1; Max N. 
of Entries:1; default:2], typology=OBJECT] +2016-04-08 19:18:42 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_upper_bound is a Literal Input +2016-04-08 19:18:42 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:18:42 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:18:42 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:18:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of nearest neighbors to take into account for outliers evaluation +2016-04-08 19:18:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_upper_bound +2016-04-08 19:18:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:18:42 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:18:42 DEBUG WPS2SM:254 - Conversion to SM Type->distance_function is a Literal Input +2016-04-08 19:18:42 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:18:42 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:18:42 DEBUG WPS2SM:101 - Guessed default value: euclidian distance +2016-04-08 19:18:42 DEBUG WPS2SM:265 - ValueType[]:[euclidian distance, squared distance, cosine distance, inverted cosine distance, angle] +2016-04-08 19:18:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the distance function to use in the calculation +2016-04-08 19:18:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:distance_function +2016-04-08 19:18:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:18:42 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. 
of Entries:1; default:euclidian distance], typology=ENUM] +2016-04-08 19:18:42 DEBUG WPS2SM:254 - Conversion to SM Type->lof_threshold is a Literal Input +2016-04-08 19:18:42 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:18:42 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:18:42 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 19:18:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the LOF score threshold over which the point is an outlier (usually 2) +2016-04-08 19:18:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:lof_threshold +2016-04-08 19:18:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:18:42 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 19:18:42 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. 
of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. of Entries:1; default:euclidian distance], typology=ENUM], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 19:18:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:18:42 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:18:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:18:42 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:18:42 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:18:42 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:18:42 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:18:42 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:18:42 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:18:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:18:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:18:42 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:18:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:18:42 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:18:42 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:18:42 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:18:42 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:18:42 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:18:42 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:18:42 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:18:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:18:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:18:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:18:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:18:42 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:18:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:18:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:18:42 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:18:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:18:42 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:18:42 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:18:42 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:18:42 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:18:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:18:42 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:18:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:18:42 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:18:42 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:18:42 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:18:42 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:18:42 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:18:42 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:18:42 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:18:42 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:18:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:18:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:18:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:18:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:18:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:18:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:18:42 INFO JCRWorkspace:315 - Getting Workspace of user: 
giancarlo.panichi +2016-04-08 19:18:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:18:42 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:18:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:18:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:18:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:18:43 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:18:43 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:18:43 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:18:43 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:18:43 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:18:43 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:18:43 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:18:43 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:18:43 INFO WorkspaceExplorerServiceImpl:142 - end time - 179 msc 0 sec +2016-04-08 19:18:43 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:19:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:19:05 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:20:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:20:00 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:20:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:20:55 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 19:21:50 
DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:21:50 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:22:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:22:45 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:23:50 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 19:23:50 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 19:23:50 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 19:23:50 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 19:23:50 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 19:23:50 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:23:50 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 19:23:50 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@440ff6a3 +2016-04-08 19:23:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:23:50 INFO ASLSession:352 - Logging the entrance +2016-04-08 19:23:50 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 19:23:50 DEBUG TemplateModel:83 - 2016-04-08 19:23:50, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 19:23:50 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:23:50 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 19:23:55 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 19:23:55 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 19:23:55 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 19:23:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:23:55 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 19:23:55 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:23:56 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:23:56 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 120 ms +2016-04-08 
19:23:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 19:23:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 19:23:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 19:23:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 19:23:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 19:23:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 19:23:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 19:23:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 19:23:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 19:23:56 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 19:23:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 19:23:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 19:23:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 19:23:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 19:23:56 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 19:23:56 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:23:56 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 19:23:56 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@28f7812 +2016-04-08 19:23:56 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@4ef40ef2 +2016-04-08 19:23:56 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@6a785809 +2016-04-08 19:23:56 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@98e893d +2016-04-08 19:23:56 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 101 ms +2016-04-08 19:23:56 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 19:23:56 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 19:23:56 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 19:23:56 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 19:23:56 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 19:23:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:23:56 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:23:56 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 19:23:56 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 30 ms +2016-04-08 19:23:56 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 19:23:56 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:23:56 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 19:23:56 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:23:57 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:23:57 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 19:24:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:24:01 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:24:01 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:24:01 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 19:24:01 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:24:01 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:24:02 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:24:02 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:24:02 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:24:02 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:24:02 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:24:02 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 19:24:02 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 19:24:02 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:24:02 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:24:02 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:24:02 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:24:02 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:24:02 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:24:02 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:24:02 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:24:02 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:24:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:24:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:24:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:24:02 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:24:02 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:24:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:24:02 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:24:02 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:24:02 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 19:24:02 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:24:02 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:24:02 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:24:02 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:24:02 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:24:02 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:24:02 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:24:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:24:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:24:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:24:02 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:24:02 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:24:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:24:02 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:24:02 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:24:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:24:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:24:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:24:02 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:24:02 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 19:24:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:24:02 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:24:02 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:24:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 19:24:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 19:24:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:24:02 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:24:02 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:24:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:24:02 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:24:02 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 19:24:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 19:24:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:24:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:24:02 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 19:24:02 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 19:24:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:24:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:24:02 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 19:24:02 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:24:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:24:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:24:02 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:24:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:24:02 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:24:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:24:02 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:24:02 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:24:02 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:24:02 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:24:02 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:24:02 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 19:24:02 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 19:24:02 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 19:24:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-08 19:24:02 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:24:02 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 19:24:02 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 19 ms +2016-04-08 19:24:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:24:02 DEBUG ASLSession:458 - Getting security token: null in thread 29 
+2016-04-08 19:24:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:24:02 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:24:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:24:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:24:02 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:24:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:24:02 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:24:02 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:24:02 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:24:02 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:24:02 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:24:02 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:24:02 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 19:24:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:24:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:24:02 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:24:02 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:24:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:24:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:24:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:24:02 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:24:02 
DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:24:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:24:02 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:24:02 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 19:24:02 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:24:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:24:02 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 19:24:02 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:24:03 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:24:03 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 19:24:03 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:24:03 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:24:03 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 19:24:03 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 19:24:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:24:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:24:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:24:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:24:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:24:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:24:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:24:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:24:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:24:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:24:03 DEBUG JCRHomeManager:97 - User is already logged 
+2016-04-08 19:24:03 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:24:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:24:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:24:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:24:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:24:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:24:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:24:03 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:24:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:24:03 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:24:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:24:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:24:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:24:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:24:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:24:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:24:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:24:03 INFO JCRServlets:142 - Calling servlet getChildrenById 
efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:24:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 19:24:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:24:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:24:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:24:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 39 ms +2016-04-08 19:24:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 19:24:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:24:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 19:24:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and 
$resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 19:24:03 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-08 19:24:03 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 19:24:03 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 19:24:03 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 19:24:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 19:24:03 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 19:24:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 19:24:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:24:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:24:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 19:24:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 19:24:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 19:24:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 19:24:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:24:03 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:24:03 DEBUG ItemBuilder:108 - Is shared folder: 
Cotrix test +2016-04-08 19:24:03 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:24:03 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:24:03 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:24:03 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:24:03 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:24:03 INFO WorkspaceExplorerServiceImpl:142 - end time - 437 msc 0 sec +2016-04-08 19:24:03 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:24:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:24:45 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 19:25:52 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 19:25:52 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 19:25:52 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 19:25:52 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 19:25:52 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 19:25:52 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:25:52 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 19:25:52 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@21208cee +2016-04-08 19:25:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:25:52 INFO ASLSession:352 - Logging the entrance +2016-04-08 19:25:52 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 19:25:52 DEBUG TemplateModel:83 - 2016-04-08 19:25:52, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 19:25:52 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:25:52 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 19:25:57 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 19:25:57 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 19:25:57 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 19:25:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:25:57 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:25:57 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:25:57 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:25:57 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 145 ms +2016-04-08 
19:25:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 19:25:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 19:25:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 19:25:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 19:25:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 19:25:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 19:25:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 19:25:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 19:25:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 19:25:57 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 19:25:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 19:25:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 19:25:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 19:25:57 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 19:25:57 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 19:25:57 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:25:57 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 19:25:57 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@439a19b +2016-04-08 19:25:57 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@441b05d8 +2016-04-08 19:25:57 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@4e408650 +2016-04-08 19:25:57 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@54301fa4 +2016-04-08 19:25:57 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 111 ms +2016-04-08 19:25:57 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 19:25:57 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 19:25:57 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 19:25:57 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 19:25:57 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 19:25:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:25:57 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:25:57 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 19:25:57 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 27 ms +2016-04-08 19:25:57 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 19:25:57 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:25:57 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 19:25:57 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:25:58 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:25:58 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 19:26:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:26:03 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:26:03 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:26:03 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 19:26:03 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:26:03 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:26:03 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:26:03 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:26:03 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:26:03 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:26:03 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:26:03 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 19:26:03 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 19:26:03 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:26:03 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:26:03 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:26:03 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:26:03 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:26:03 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:26:03 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:26:03 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:26:03 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:26:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:26:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:26:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:26:03 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:26:03 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:26:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:26:03 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:26:03 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:26:03 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 19:26:03 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:26:03 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:26:03 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:26:03 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:26:03 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:26:03 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:26:03 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:26:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:26:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:26:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:26:03 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:26:03 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:26:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:26:03 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:26:03 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:26:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:26:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:26:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:26:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:26:03 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 19:26:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:26:03 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:26:03 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:26:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 19:26:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 19:26:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:26:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:26:03 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:26:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:26:03 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:26:03 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 19:26:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 19:26:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:26:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:26:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 19:26:03 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 19:26:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:26:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:26:04 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 19:26:04 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:26:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:26:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:26:04 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:26:04 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:26:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:26:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:26:04 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:26:04 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:26:04 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:26:04 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:26:04 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:26:04 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 19:26:04 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 19:26:04 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 19:26:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-08 19:26:04 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:26:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 19:26:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 19 ms +2016-04-08 19:26:04 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is 
null?false +2016-04-08 19:26:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:26:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:26:04 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:26:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:26:04 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 19:26:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:26:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:26:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:26:04 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 19:26:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:26:04 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:26:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:26:04 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:26:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:26:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:26:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:26:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:26:04 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:26:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:26:04 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:26:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:26:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:26:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:26:04 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:26:04 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:26:04 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 19:26:04 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:26:04 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:26:04 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 19:26:04 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 19:26:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:26:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:26:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:26:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:26:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:26:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:26:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:26:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:26:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:26:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:26:04 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:26:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:26:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 19:26:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:26:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:26:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:26:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:26:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:26:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:26:04 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:26:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:26:04 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:26:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:26:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:26:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:26:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:26:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:26:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:26:04 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:26:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:26:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 
19:26:05 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:26:05 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:26:05 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 30 ms +2016-04-08 19:26:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:26:05 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:26:05 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 19:26:05 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 19:26:05 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 19:26:05 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 19:26:05 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 19:26:05 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 19:26:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:26:05 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 19:26:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:26:05 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:26:05 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:26:05 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 19:26:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:26:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:26:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:26:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:26:05 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:26:05 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:26:05 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:26:05 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 19:26:05 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:26:05 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:26:05 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:26:05 INFO WorkspaceExplorerServiceImpl:142 - end time - 418 msc 0 sec +2016-04-08 19:26:05 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:26:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:26:47 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 19:27:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:27:42 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:28:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:28:37 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:29:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:29:32 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:31:00 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 19:31:00 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 19:31:00 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-08 19:31:00 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 19:31:00 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 19:31:00 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:31:00 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 19:31:00 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@5e9122fd +2016-04-08 19:31:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:31:00 INFO ASLSession:352 - Logging the entrance +2016-04-08 19:31:00 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 19:31:00 DEBUG TemplateModel:83 - 2016-04-08 19:31:00, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 19:31:00 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:31:00 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 19:31:04 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 19:31:04 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 19:31:04 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 19:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:31:04 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 19:31:04 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:31:04 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:31:04 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 119 ms +2016-04-08 19:31:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 19:31:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 19:31:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 19:31:04 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 19:31:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 19:31:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 19:31:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 19:31:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 19:31:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 19:31:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 19:31:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 19:31:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 19:31:04 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 19:31:04 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 19:31:04 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 19:31:04 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:31:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:31:04 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@63a0c947 +2016-04-08 19:31:04 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@9872ec7 +2016-04-08 19:31:04 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@bcb5ca8 +2016-04-08 19:31:04 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@57b722a5 +2016-04-08 19:31:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 105 ms +2016-04-08 19:31:04 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 19:31:04 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 19:31:04 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 19:31:04 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 19:31:04 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 19:31:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:31:04 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:31:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 19:31:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 21 ms +2016-04-08 19:31:04 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 19:31:04 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:31:04 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 19:31:04 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:31:05 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:31:05 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 19:31:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:31:10 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:31:10 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:31:10 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 19:31:10 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:31:10 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:31:11 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:31:11 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:31:11 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:31:11 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:31:11 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:31:11 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 19:31:11 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 19:31:11 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:31:11 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:31:11 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:31:11 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:31:11 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:31:11 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:31:11 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:31:11 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:31:11 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:31:11 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:31:11 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:31:11 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:31:11 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:31:11 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:31:11 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:31:11 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:31:11 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:31:11 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 19:31:11 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:31:11 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:31:11 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:31:11 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:31:11 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:31:11 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:31:11 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:31:11 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:31:11 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:31:11 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:31:11 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:31:11 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:31:11 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:31:11 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:31:11 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:31:11 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:31:11 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:31:11 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:31:11 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:31:11 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 19:31:11 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:31:11 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:31:11 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:31:11 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 19:31:11 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 19:31:11 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:31:11 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:31:11 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:31:11 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:31:11 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:31:11 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 19:31:11 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 19:31:11 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:31:11 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:31:11 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 19:31:11 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 19:31:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:31:11 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 19:31:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:31:11 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:31:11 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:31:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:31:11 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:31:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:31:11 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:31:11 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:31:11 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:31:11 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:31:11 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:31:11 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:31:11 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:31:11 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 19:31:11 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 19:31:11 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 19:31:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 19:31:11 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:31:11 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 19:31:11 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 21 ms +2016-04-08 19:31:11 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 19:31:11 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:31:11 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:31:11 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:31:11 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:31:11 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 19:31:11 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:31:11 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:31:11 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:31:11 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 19:31:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:31:11 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:31:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:31:11 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:31:11 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:31:11 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:31:11 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:31:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:31:11 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:31:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:31:11 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:31:11 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:31:11 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:31:11 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:31:11 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:31:11 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:31:11 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 19:31:12 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:31:12 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:31:12 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 19:31:12 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 19:31:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:31:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:31:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:31:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:31:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:31:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:31:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:31:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:31:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:31:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:31:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:31:12 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:31:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by 
giancarlo.panichi +2016-04-08 19:31:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:31:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:31:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:31:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:31:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:31:12 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:31:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:31:12 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 19:31:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:31:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:31:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:31:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:31:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:31:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:31:12 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:31:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:31:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:31:12 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 
19:31:12 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:31:12 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 33 ms +2016-04-08 19:31:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:31:12 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:31:12 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 19:31:12 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 19:31:12 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 19:31:12 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 19:31:12 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 19:31:12 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 19:31:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:31:12 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 19:31:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:31:12 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:31:12 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:31:12 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 19:31:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:31:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:31:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:31:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:31:12 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:31:12 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:31:12 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:31:12 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 19:31:12 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:31:12 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:31:12 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:31:12 INFO WorkspaceExplorerServiceImpl:142 - end time - 447 msc 0 sec +2016-04-08 19:31:12 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:31:13 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:34:10 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 19:34:10 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 19:34:10 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 19:34:10 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 19:34:10 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 19:34:10 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 19:34:10 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 19:34:10 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@75062ad6 +2016-04-08 19:34:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:34:10 INFO ASLSession:352 - Logging the entrance +2016-04-08 19:34:10 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 19:34:10 DEBUG TemplateModel:83 - 2016-04-08 19:34:10, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 19:34:10 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:34:10 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 19:34:14 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 19:34:14 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 19:34:14 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 19:34:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:34:14 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:34:14 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:34:14 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:34:15 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 133 ms +2016-04-08 
19:34:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 19:34:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 19:34:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 19:34:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 19:34:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 19:34:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 19:34:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 19:34:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 19:34:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 19:34:15 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 19:34:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 19:34:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 19:34:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 19:34:15 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 19:34:15 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 19:34:15 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:34:15 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 19:34:15 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@fd99b6d +2016-04-08 19:34:15 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@7686119f +2016-04-08 19:34:15 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@2bd33190 +2016-04-08 19:34:15 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@4540a39e +2016-04-08 19:34:15 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 238 ms +2016-04-08 19:34:16 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 19:34:16 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 19:34:16 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 19:34:16 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 19:34:16 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 19:34:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:34:16 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:34:16 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 19:34:16 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 28 ms +2016-04-08 19:34:16 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 19:34:16 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:34:16 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 19:34:16 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:34:16 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:34:17 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 19:34:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:34:20 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:34:20 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:34:20 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 19:34:20 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:34:20 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:34:20 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:34:20 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:34:20 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:34:20 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:34:20 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:34:20 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 19:34:20 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 19:34:20 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:34:20 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:34:20 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:34:20 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:34:20 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:34:20 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:34:20 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:34:20 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:34:20 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:34:20 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:34:20 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:34:20 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:34:20 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:34:20 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:34:20 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:34:20 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:34:20 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:34:20 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 19:34:20 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:34:20 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:34:20 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:34:20 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:34:20 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:34:20 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:34:20 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:34:20 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:34:20 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:34:20 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:34:20 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:34:20 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:34:20 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:34:20 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:34:20 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:34:20 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:34:20 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:34:20 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:34:20 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:34:20 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 19:34:20 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:34:20 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:34:20 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:34:20 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 19:34:20 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 19:34:20 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:34:20 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:34:20 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:34:20 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:34:20 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:34:20 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 19:34:20 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 19:34:20 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:34:20 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:34:20 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 19:34:20 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 19:34:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:34:20 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:34:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:34:20 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:34:20 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:34:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:34:20 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:34:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:34:20 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:34:20 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:34:21 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:34:21 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:34:21 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:34:21 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:34:21 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:34:21 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 19:34:21 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 19:34:21 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 19:34:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 19:34:21 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:34:21 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 19:34:21 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-08 19:34:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:34:21 DEBUG ASLSession:458 - Getting security token: null in thread 32 
+2016-04-08 19:34:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:34:21 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:34:21 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:34:21 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:34:21 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:34:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:34:21 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 19:34:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:34:21 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:34:21 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:34:21 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:34:21 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:34:21 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 19:34:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:34:21 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:34:21 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:34:21 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. 
+2016-04-08 19:34:21 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:34:21 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:34:21 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:34:21 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:34:21 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:34:21 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:34:21 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:34:21 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:34:21 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:34:21 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:34:21 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 19:34:22 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:34:22 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:34:22 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 19:34:22 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:34:22 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:34:22 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 19:34:22 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 19:34:22 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:34:22 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:34:22 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:34:22 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:34:22 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:34:22 DEBUG JCRHomeManager:97 - User is already logged 
+2016-04-08 19:34:22 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:34:22 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:34:22 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:34:22 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:34:22 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:34:22 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:34:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:34:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:34:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:34:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:34:22 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:34:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:34:22 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:34:22 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:34:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:34:22 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:34:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:34:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:34:22 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:34:22 INFO 
JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:34:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:34:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:34:22 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:34:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 19:34:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:34:22 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:34:22 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:34:22 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 31 ms +2016-04-08 19:34:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 19:34:22 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:34:22 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where 
($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 19:34:22 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 19:34:22 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-08 19:34:22 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 19:34:22 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 19:34:22 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 19:34:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 19:34:22 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 19:34:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 19:34:22 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:34:22 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:34:22 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 19:34:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube in 
thread 29 +2016-04-08 19:34:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 19:34:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-08 19:34:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:34:22 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:34:22 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:34:22 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:34:22 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:34:22 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:34:22 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:34:22 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:34:22 INFO WorkspaceExplorerServiceImpl:142 - end time - 535 msc 0 sec +2016-04-08 19:34:22 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:34:40 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 19:34:40 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 19:34:40 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:34:40 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 19:34:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:34:40 INFO ASLSession:352 - Logging the entrance +2016-04-08 19:34:40 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 19:34:40 DEBUG TemplateModel:83 - 2016-04-08 19:34:40, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 19:34:40 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:34:40 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 19:34:43 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 19:34:43 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 19:34:43 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 19:34:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:34:43 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:34:43 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:34:43 INFO DiscoveryDelegate:77 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
(cached) +2016-04-08 19:34:43 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 19:34:43 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 19:34:43 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 19:34:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:34:43 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:34:43 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 19:34:43 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 21 ms +2016-04-08 19:34:43 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 19:34:43 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:34:43 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 19:34:43 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:34:43 INFO StatWPSClientSession:133 - service removed successfully: 
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:34:43 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. 
an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. 
The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 19:34:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:34:47 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:34:47 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:34:47 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 19:34:47 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:34:47 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:34:48 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:34:48 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:34:48 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:34:48 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:34:48 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:34:48 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 19:34:48 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 19:34:48 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:34:48 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:34:48 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:34:48 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:34:48 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:34:48 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:34:48 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:34:48 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:34:48 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:34:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:34:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:34:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:34:48 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:34:48 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:34:48 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:34:48 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:34:48 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:34:48 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 19:34:48 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:34:48 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:34:48 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:34:48 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:34:48 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:34:48 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:34:48 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:34:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:34:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:34:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:34:48 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:34:48 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:34:48 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:34:48 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:34:48 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:34:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:34:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:34:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:34:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:34:48 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 19:34:48 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:34:48 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:34:48 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:34:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 19:34:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 19:34:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:34:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:34:48 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:34:48 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:34:48 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:34:48 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 19:34:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 19:34:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:34:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:34:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 19:34:48 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 19:34:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:34:48 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:34:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:34:48 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:34:48 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:34:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:34:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:34:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:34:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:34:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:34:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:34:48 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 19:34:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:34:48 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:34:48 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:34:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:34:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:34:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:34:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:34:48 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:34:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:34:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:34:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:34:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:34:48 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:34:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:34:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:34:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:34:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:34:48 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:34:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:34:48 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:34:48 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:34:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:34:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:34:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:34:48 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:34:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:34:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:34:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:34:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:34:48 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:34:48 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:34:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:34:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:34:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:34:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:34:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:34:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:34:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:34:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:34:48 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:34:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:34:48 INFO JCRWorkspace:315 - Getting Workspace 
of user: giancarlo.panichi +2016-04-08 19:34:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:34:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:34:48 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:34:48 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:34:48 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:34:48 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:34:48 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:34:48 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:34:48 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:34:48 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:34:48 INFO WorkspaceExplorerServiceImpl:142 - end time - 186 msc 0 sec +2016-04-08 19:34:48 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:35:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:35:35 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:36:40 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 19:36:40 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 19:36:40 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-08 19:36:40 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 19:36:40 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 19:36:40 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:36:40 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 19:36:40 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@5af41d3e +2016-04-08 19:36:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:36:40 INFO ASLSession:352 - Logging the entrance +2016-04-08 19:36:40 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 19:36:40 DEBUG TemplateModel:83 - 2016-04-08 19:36:40, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 19:36:40 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:36:40 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 19:36:44 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 19:36:44 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 19:36:44 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 19:36:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:36:44 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:36:44 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:36:45 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:36:45 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 117 ms +2016-04-08 19:36:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 19:36:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 19:36:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 19:36:45 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 19:36:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 19:36:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 19:36:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 19:36:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 19:36:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 19:36:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 19:36:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 19:36:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 19:36:45 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 19:36:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 19:36:45 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 19:36:45 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:36:45 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:36:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@543890cb +2016-04-08 19:36:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@76d88a39 +2016-04-08 19:36:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@1be41116 +2016-04-08 19:36:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@3a0740a6 +2016-04-08 19:36:45 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 116 ms +2016-04-08 19:36:45 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 19:36:45 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 19:36:45 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 19:36:45 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 19:36:45 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 19:36:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:36:45 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:36:45 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 19:36:45 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 31 ms +2016-04-08 19:36:45 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 19:36:45 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:36:45 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 19:36:45 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:36:46 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:36:46 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 19:36:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:36:50 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:36:50 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:36:50 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 19:36:50 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:36:50 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:36:51 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:36:51 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:36:51 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:36:51 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:36:51 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:36:51 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 19:36:51 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 19:36:51 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:36:51 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:36:51 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:36:51 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:36:51 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:36:51 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:36:51 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:36:51 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:36:51 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:36:51 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:36:51 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:36:51 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:36:51 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:36:51 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:36:51 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:36:51 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:36:51 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:36:51 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 19:36:51 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:36:51 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:36:51 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:36:51 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:36:51 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:36:51 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:36:51 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:36:51 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:36:51 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:36:51 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:36:51 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:36:51 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:36:51 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:36:51 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:36:51 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:36:51 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:36:51 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:36:51 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:36:51 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:36:51 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 19:36:51 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:36:51 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:36:51 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:36:51 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 19:36:51 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 19:36:51 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:36:51 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:36:51 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:36:51 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:36:51 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:36:51 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 19:36:51 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 19:36:51 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:36:51 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:36:51 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 19:36:51 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 19:36:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:36:51 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:36:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:36:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:36:51 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:36:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:36:51 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:36:51 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:36:51 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:36:51 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:36:51 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:36:51 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:36:51 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:36:51 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:36:51 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:36:51 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 19:36:51 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 19:36:51 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 19:36:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:36:51 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:36:51 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 19:36:51 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 20 ms +2016-04-08 19:36:51 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 19:36:51 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:36:51 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:36:51 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:36:51 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 19:36:51 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:36:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:36:51 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:36:51 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:36:51 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 19:36:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:36:51 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 19:36:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:36:51 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:36:51 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:36:51 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:36:51 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:36:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:36:51 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:36:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:36:51 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:36:51 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:36:51 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:36:51 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:36:51 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:36:51 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:36:51 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 19:36:52 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:36:52 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:36:52 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 19:36:52 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 19:36:52 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:36:52 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:36:52 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:36:52 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:36:52 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:36:52 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:36:52 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:36:52 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:36:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:36:52 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:36:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:36:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:36:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by 
giancarlo.panichi +2016-04-08 19:36:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:36:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:36:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:36:53 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:36:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:36:53 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:36:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:36:53 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 19:36:53 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:36:53 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:36:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:36:53 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:36:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:36:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:36:53 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:36:53 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:36:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 19:36:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 
19:36:53 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:36:53 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:36:53 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 31 ms +2016-04-08 19:36:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 19:36:53 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:36:53 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 19:36:53 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 19:36:53 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 19:36:53 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 19:36:53 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 19:36:53 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 19:36:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 19:36:53 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 19:36:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 19:36:53 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:36:53 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:36:53 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-08 19:36:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 19:36:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 19:36:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 19:36:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:36:53 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:36:53 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:36:53 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:36:53 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 19:36:53 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:36:53 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:36:53 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:36:53 INFO WorkspaceExplorerServiceImpl:142 - end time - 462 msc 0 sec +2016-04-08 19:36:53 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:37:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:37:35 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:38:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:38:30 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:39:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:39:25 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:40:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:40:20 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:41:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:41:15 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:43:05 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 19:43:05 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 19:43:05 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-08 19:43:05 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 19:43:06 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 19:43:06 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:43:06 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 19:43:06 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@2a73ce3c +2016-04-08 19:43:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:43:06 INFO ASLSession:352 - Logging the entrance +2016-04-08 19:43:06 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 19:43:06 DEBUG TemplateModel:83 - 2016-04-08 19:43:06, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 19:43:06 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:43:06 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 19:43:08 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 19:43:08 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 19:43:08 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 19:43:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:43:08 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:43:08 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:43:09 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:43:09 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 114 ms +2016-04-08 19:43:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 19:43:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 19:43:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 19:43:09 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 19:43:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 19:43:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 19:43:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 19:43:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 19:43:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 19:43:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 19:43:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 19:43:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 19:43:09 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 19:43:09 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 19:43:09 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 19:43:09 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:43:09 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:43:09 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@37bdae57 +2016-04-08 19:43:09 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@f9c9127 +2016-04-08 19:43:09 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@50efd4cd +2016-04-08 19:43:09 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@4c3504a2 +2016-04-08 19:43:09 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 131 ms +2016-04-08 19:43:09 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 19:43:09 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 19:43:09 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 19:43:09 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 19:43:09 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 19:43:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:43:09 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:43:09 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 19:43:09 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-08 19:43:09 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 19:43:09 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:43:09 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 19:43:09 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:43:10 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:43:10 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 19:43:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:43:13 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:43:13 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:43:13 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 19:43:13 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:43:13 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:43:13 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:43:13 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:43:13 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:43:13 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:43:13 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:43:13 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 19:43:13 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 19:43:13 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:43:13 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:43:13 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:43:13 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:43:13 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:43:13 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:43:13 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:43:13 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:43:13 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:43:13 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:43:13 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:43:13 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:43:13 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:43:13 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:43:13 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:43:13 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:43:13 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:43:13 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 19:43:13 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:43:13 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:43:13 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:43:13 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:43:13 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:43:13 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:43:13 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:43:13 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:43:13 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:43:13 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:43:13 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:43:13 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:43:13 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:43:13 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:43:13 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:43:13 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:43:13 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:43:13 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:43:13 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:43:13 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 19:43:13 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:43:13 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:43:13 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:43:13 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 19:43:13 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 19:43:13 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:43:13 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:43:13 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:43:13 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:43:13 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:43:13 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 19:43:13 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 19:43:13 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:43:13 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:43:13 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 19:43:13 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 19:43:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:43:13 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:43:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:43:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:43:13 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:43:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:43:13 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:43:13 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:43:13 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:43:13 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:43:13 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:43:13 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:43:13 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:43:13 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 19:43:13 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:43:13 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:43:13 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 19:43:13 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 19:43:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:43:13 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:43:13 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 19:43:13 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-08 19:43:13 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 19:43:13 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:43:13 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:43:13 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:43:13 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:43:13 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:43:13 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:43:13 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 19:43:13 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:43:13 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 19:43:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:43:13 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:43:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:43:13 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:43:13 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:43:13 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:43:13 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:43:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:43:13 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 19:43:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:43:13 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:43:13 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:43:13 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:43:13 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:43:14 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:43:14 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:43:14 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 19:43:14 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:43:14 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:43:14 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 19:43:14 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 19:43:14 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:43:14 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:43:14 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:43:14 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:43:14 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:43:14 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:43:14 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:43:14 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:43:14 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:43:14 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:43:14 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:43:14 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:43:14 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 19:43:14 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:43:14 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:43:14 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:43:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:43:14 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:43:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:43:14 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:43:14 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:43:14 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:43:14 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:43:14 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:43:14 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:43:14 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:43:14 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:43:14 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:43:14 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:43:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-08 19:43:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 
19:43:14 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:43:14 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:43:14 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 24 ms +2016-04-08 19:43:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-08 19:43:14 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:43:14 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 19:43:14 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-08 19:43:14 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 19:43:14 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 19:43:14 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 19:43:14 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 19:43:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-08 19:43:14 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 19:43:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-08 19:43:14 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:43:14 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:43:14 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-08 19:43:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-08 19:43:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-08 19:43:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-08 19:43:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:43:14 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:43:14 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:43:14 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:43:14 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 19:43:14 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:43:14 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:43:14 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:43:14 INFO WorkspaceExplorerServiceImpl:142 - end time - 420 msc 0 sec +2016-04-08 19:43:14 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:44:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:44:00 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:44:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:44:20 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:44:20 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:44:20 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-08 19:44:20 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:44:20 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:44:21 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. 
Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:44:21 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:44:21 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:44:21 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:44:21 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:44:21 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-08 19:44:21 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-08 19:44:21 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-08 19:44:21 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-08 19:44:21 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:44:21 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:44:21 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:44:21 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:44:21 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:44:21 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:44:21 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:44:21 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:44:21 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:44:21 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:44:21 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:44:21 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:44:21 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:44:21 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:44:21 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:44:21 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:44:21 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:44:21 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 19:44:21 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:44:21 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:44:21 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:44:21 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:44:21 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:44:21 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:44:21 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:44:21 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:44:21 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:44:21 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:44:21 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:44:21 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:44:21 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:44:21 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:44:21 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:44:21 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:44:21 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:44:21 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:44:21 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:44:21 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-08 19:44:21 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:44:21 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:44:21 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-08 19:44:21 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-08 19:44:21 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-08 19:44:21 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:44:21 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-08 19:44:21 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-08 19:44:21 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:44:21 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:44:21 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:44:21 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-08 19:44:21 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-08 19:44:21 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:44:21 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:44:21 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-08 19:44:21 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:44:21 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:44:21 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-08 19:44:21 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-08 19:44:21 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-08 19:44:21 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:44:21 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-08 19:44:21 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:44:21 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:44:21 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:44:21 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 19:44:21 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-08 19:44:21 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:44:21 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:44:21 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 19:44:21 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 19:44:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:44:21 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:44:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:44:21 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:44:21 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:44:21 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:44:21 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:44:21 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:44:21 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:44:21 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:44:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:44:21 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:44:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:44:21 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:44:21 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:44:21 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:44:21 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:44:21 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:44:21 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:44:21 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:44:21 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:44:21 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:44:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:44:21 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:44:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:44:21 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:44:21 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:44:21 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:44:21 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:44:21 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:44:21 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:44:21 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:44:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:44:21 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 19:44:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:44:21 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:44:21 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:44:21 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:44:21 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:44:21 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:44:21 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:44:21 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:44:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:44:21 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:44:21 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:44:21 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:44:21 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:44:21 INFO JCRServlets:238 - Calling Servlet GetItemByPath 
/Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:44:21 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:44:21 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:44:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:44:21 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 19:44:21 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:44:21 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:44:21 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:44:21 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:44:21 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:44:21 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:44:21 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:44:21 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:44:21 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:44:21 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:44:21 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:44:21 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:44:21 INFO WorkspaceExplorerServiceImpl:142 - end time - 221 msc 0 sec +2016-04-08 19:44:21 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:44:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:44:55 DEBUG ASLSession:458 - Getting security token: null in thread 33 
+2016-04-08 19:45:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:45:50 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:46:52 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 19:46:52 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 19:46:52 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 19:46:52 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 19:46:52 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 19:46:52 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 19:46:52 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 19:46:52 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@6eb7cecf +2016-04-08 19:46:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:46:52 INFO ASLSession:352 - Logging the entrance +2016-04-08 19:46:52 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 19:46:52 DEBUG TemplateModel:83 - 2016-04-08 19:46:52, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 19:46:52 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:46:52 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 19:46:56 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. 
+2016-04-08 19:46:56 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 19:46:56 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 19:46:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:46:56 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 19:46:56 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:46:56 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:46:56 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 125 ms +2016-04-08 19:46:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 19:46:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 19:46:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 19:46:56 INFO 
ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 19:46:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 19:46:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 19:46:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 19:46:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 19:46:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 19:46:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 19:46:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 19:46:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 19:46:56 INFO 
ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 19:46:56 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 19:46:56 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 19:46:57 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:46:57 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:46:57 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@6d5feaec +2016-04-08 19:46:57 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@3a34c7d7 +2016-04-08 19:46:57 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@abc73ff +2016-04-08 19:46:57 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@55d59def +2016-04-08 19:46:57 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 118 ms +2016-04-08 19:46:57 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 19:46:57 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 19:46:57 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 19:46:57 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 19:46:57 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 19:46:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:46:57 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:46:57 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 19:46:57 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 21 ms +2016-04-08 19:46:57 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 19:46:57 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:46:57 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 19:46:57 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:46:58 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:46:58 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 19:47:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:47:02 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:47:02 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:47:02 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 19:47:02 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:47:02 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:47:02 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:47:02 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:47:02 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:47:02 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:47:02 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:47:02 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 19:47:02 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 19:47:02 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:47:02 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:47:02 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:47:02 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:47:02 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:47:02 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:47:02 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:47:02 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:47:02 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:47:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:47:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:47:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:47:02 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:47:02 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:47:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:47:02 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:47:02 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:47:02 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 19:47:02 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:47:02 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:47:02 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:47:02 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:47:02 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:47:02 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:47:02 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:47:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:47:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:47:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:47:02 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:47:02 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:47:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:47:02 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:47:02 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:47:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:47:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:47:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:47:02 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:47:02 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 19:47:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:47:02 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:47:02 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:47:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 19:47:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 19:47:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:47:02 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:47:02 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:47:02 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:47:02 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:47:02 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 19:47:02 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 19:47:02 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:47:02 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:47:02 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 19:47:02 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 19:47:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:47:03 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:47:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:47:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:47:03 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:47:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:47:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:47:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:47:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:47:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:47:03 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:47:03 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:47:03 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:47:03 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:47:03 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:47:03 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 19:47:03 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 19:47:03 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 19:47:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 19:47:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:47:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 19:47:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-08 19:47:03 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is 
null?false +2016-04-08 19:47:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:47:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:47:03 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:47:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:47:03 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 19:47:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:47:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:47:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:47:03 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 19:47:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:47:03 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:47:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:47:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:47:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:47:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:47:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:47:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:47:03 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:47:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:47:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:47:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:47:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:47:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:47:03 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:47:03 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:47:03 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 19:47:03 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:47:03 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:47:03 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 19:47:03 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 19:47:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:47:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:47:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:47:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:47:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:47:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:47:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:47:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:47:03 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:47:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:47:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:47:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:47:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 19:47:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:47:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:47:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:47:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:47:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:47:03 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:47:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:47:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:47:03 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:47:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:47:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:47:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:47:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:47:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:47:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:47:03 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:47:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 19:47:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 
19:47:04 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:47:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:47:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 31 ms +2016-04-08 19:47:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 19:47:04 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:47:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 19:47:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 19:47:04 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 19:47:04 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 19:47:04 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 19:47:04 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 19:47:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 19:47:04 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 19:47:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 19:47:04 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:47:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:47:04 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-08 19:47:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 19:47:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 19:47:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 19:47:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:47:04 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:47:04 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:47:04 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:47:04 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 19:47:04 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:47:04 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:47:04 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:47:04 INFO WorkspaceExplorerServiceImpl:142 - end time - 434 msc 0 sec +2016-04-08 19:47:04 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:47:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:47:47 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:48:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:48:42 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 19:48:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:48:56 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 19:48:56 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:48:56 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-08 19:48:56 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:48:56 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:48:57 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:48:57 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:48:57 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:48:57 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:48:57 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:48:57 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-08 19:48:57 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-08 19:48:57 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-08 19:48:57 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-08 19:48:57 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:48:57 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:48:57 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:48:57 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:48:57 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:48:57 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:48:57 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:48:57 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:48:57 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:48:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:48:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:48:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:48:57 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:48:57 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:48:57 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:48:57 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:48:57 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:48:57 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 19:48:57 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:48:57 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:48:57 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:48:57 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:48:57 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:48:57 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:48:57 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:48:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:48:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:48:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:48:57 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:48:57 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:48:57 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:48:57 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:48:57 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:48:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:48:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:48:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:48:57 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:48:57 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-08 19:48:57 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:48:57 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:48:57 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-08 19:48:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-08 19:48:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-08 19:48:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:48:57 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-08 19:48:57 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-08 19:48:57 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:48:57 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:48:57 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:48:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-08 19:48:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-08 19:48:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:48:57 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:48:57 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-08 19:48:57 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:48:57 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:48:57 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-08 19:48:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-08 19:48:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-08 19:48:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:48:57 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-08 19:48:57 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:48:57 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:48:57 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:48:57 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 19:48:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-08 19:48:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:48:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:48:57 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 19:48:57 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 19:48:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:48:57 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:48:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:48:57 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:48:57 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:48:57 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:48:57 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:48:57 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:48:57 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:48:57 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:48:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:48:57 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:48:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:48:57 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:48:57 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:48:57 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:48:57 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:48:57 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:48:57 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:48:57 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:48:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:48:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:48:57 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:48:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:48:57 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:48:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:48:57 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:48:57 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:48:57 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:48:57 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:48:57 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:48:57 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:48:57 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:48:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:48:57 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:48:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:48:57 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:48:57 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:48:57 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:48:57 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:48:57 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:48:57 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:48:57 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:48:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:48:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:48:57 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:48:57 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:48:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:48:57 INFO JCRServlets:238 - Calling Servlet 
GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:48:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:48:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:48:57 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:48:57 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:48:57 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:48:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:48:57 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:48:57 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:48:57 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:48:57 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:48:57 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:48:57 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:48:57 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:48:57 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:48:57 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:48:57 INFO WorkspaceExplorerServiceImpl:142 - end time - 240 msc 0 sec +2016-04-08 19:48:57 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:49:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:49:37 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 19:50:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:50:32 
DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:51:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:51:27 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:52:50 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 19:52:50 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 19:52:50 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 19:52:50 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 19:52:50 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 19:52:50 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:52:50 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 19:52:50 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@465b2418 +2016-04-08 19:52:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:52:50 INFO ASLSession:352 - Logging the entrance +2016-04-08 19:52:50 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 19:52:50 DEBUG TemplateModel:83 - 2016-04-08 19:52:50, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 19:52:50 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:52:50 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 19:52:54 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. 
+2016-04-08 19:52:54 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 19:52:54 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 19:52:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:52:54 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:52:54 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:52:54 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:52:54 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 118 ms +2016-04-08 19:52:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 19:52:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 19:52:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 19:52:54 INFO 
ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 19:52:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 19:52:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 19:52:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 19:52:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 19:52:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 19:52:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 19:52:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 19:52:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 19:52:54 INFO 
ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 19:52:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 19:52:54 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 19:52:54 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:52:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:52:54 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@34e8b2dd +2016-04-08 19:52:54 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@ed162af +2016-04-08 19:52:54 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@734252ee +2016-04-08 19:52:54 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@62720389 +2016-04-08 19:52:54 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 119 ms +2016-04-08 19:52:54 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 19:52:54 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 19:52:54 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 19:52:54 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 19:52:54 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 19:52:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:52:54 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:52:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 19:52:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 28 ms +2016-04-08 19:52:54 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 19:52:54 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:52:54 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 19:52:54 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:52:55 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:52:55 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 19:53:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:53:00 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 19:53:00 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:53:00 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 19:53:00 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:53:00 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:53:01 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:53:01 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:53:01 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:53:01 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:53:01 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:53:01 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 19:53:01 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 19:53:01 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:53:01 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:53:01 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:53:01 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:53:01 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:53:01 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:53:01 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:53:01 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:53:01 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:53:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:53:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:53:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:53:01 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:53:01 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:53:01 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:53:01 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:53:01 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:53:01 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 19:53:01 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:53:01 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:53:01 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:53:01 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:53:01 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:53:01 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:53:01 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:53:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:53:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:53:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:53:01 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:53:01 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:53:01 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:53:01 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:53:01 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:53:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:53:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:53:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:53:01 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:53:01 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 19:53:01 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:53:01 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:53:01 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:53:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 19:53:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 19:53:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:53:01 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:53:01 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:53:01 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:53:01 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:53:01 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 19:53:01 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 19:53:01 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:53:01 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:53:01 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 19:53:01 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 19:53:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:53:01 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:53:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:53:01 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:53:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:53:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:53:01 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:53:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:53:01 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:53:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:53:01 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:53:01 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:53:01 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:53:01 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 19:53:01 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:53:01 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:53:01 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 19:53:01 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 19:53:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 19:53:01 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:53:01 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 19:53:01 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 21 ms +2016-04-08 19:53:01 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 19:53:01 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:53:01 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:53:01 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:53:01 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:53:01 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:53:01 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 19:53:01 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:53:01 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:53:01 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 19:53:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:53:01 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:53:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:53:01 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:53:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:53:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:53:01 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:53:01 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:53:01 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:53:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:53:01 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:53:01 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:53:01 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:53:01 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:53:01 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:53:01 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:53:01 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 19:53:02 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:53:02 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:53:02 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 19:53:02 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 19:53:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:53:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:53:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:53:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:53:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:53:02 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:53:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:53:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:53:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:53:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:53:02 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:53:02 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:53:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by 
giancarlo.panichi +2016-04-08 19:53:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:53:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:53:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:53:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:53:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:53:02 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:53:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:53:02 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:53:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:53:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:53:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:53:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:53:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:53:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:53:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:53:03 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:53:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 19:53:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 
19:53:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:53:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:53:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 32 ms +2016-04-08 19:53:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 19:53:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:53:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 19:53:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 18 ms +2016-04-08 19:53:03 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 19:53:03 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 19:53:03 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 19:53:03 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 19:53:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 19:53:03 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 19:53:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 19:53:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:53:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:53:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 19:53:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 19:53:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 19:53:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 19:53:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:53:03 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:53:03 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:53:03 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:53:03 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 19:53:03 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:53:03 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:53:03 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:53:03 INFO WorkspaceExplorerServiceImpl:142 - end time - 1443 msc 1 sec +2016-04-08 19:53:03 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:53:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:53:45 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:54:14 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 19:54:14 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 19:54:14 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 19:54:14 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 19:54:14 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 19:54:14 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:54:14 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 19:54:14 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@7a4f8ca1 +2016-04-08 19:54:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:54:14 INFO ASLSession:352 - Logging the entrance +2016-04-08 19:54:14 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 19:54:14 DEBUG TemplateModel:83 - 2016-04-08 19:54:14, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 19:54:14 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:54:14 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 19:54:18 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 19:54:18 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 19:54:18 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 19:54:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:54:18 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:54:18 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:54:18 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:54:18 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 127 ms +2016-04-08 
19:54:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 19:54:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 19:54:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 19:54:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 19:54:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 19:54:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 19:54:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 19:54:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 19:54:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 19:54:18 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 19:54:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 19:54:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 19:54:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 19:54:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 19:54:18 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 19:54:18 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:54:18 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 19:54:18 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@55580d21 +2016-04-08 19:54:18 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@708a1178 +2016-04-08 19:54:18 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@63ddb204 +2016-04-08 19:54:18 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@529325d3 +2016-04-08 19:54:19 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 129 ms +2016-04-08 19:54:19 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 19:54:19 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 19:54:19 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 19:54:19 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 19:54:19 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 19:54:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:54:19 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:54:19 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 19:54:19 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 37 ms +2016-04-08 19:54:19 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 19:54:19 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:54:19 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 19:54:19 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:54:20 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:54:20 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 19:54:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:54:23 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:54:23 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:54:23 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 19:54:23 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:54:23 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:54:23 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:54:23 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:54:23 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:54:23 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:54:23 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:54:23 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 19:54:23 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 19:54:23 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:54:23 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:54:23 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:54:23 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:54:23 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:54:23 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:54:23 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:54:23 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:54:23 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:54:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:54:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:54:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:54:23 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:54:23 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:54:23 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:54:23 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:54:23 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:54:23 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 19:54:23 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:54:23 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:54:23 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:54:23 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:54:23 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:54:23 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:54:23 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:54:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:54:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:54:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:54:23 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:54:23 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:54:23 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:54:23 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:54:23 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:54:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:54:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:54:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:54:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:54:23 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 19:54:23 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:54:23 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:54:23 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:54:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 19:54:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 19:54:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:54:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:54:23 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:54:23 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:54:23 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:54:23 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 19:54:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 19:54:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:54:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:54:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 19:54:23 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 19:54:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:54:24 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:54:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:54:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 28 +2016-04-08 19:54:24 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:54:24 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:54:24 DEBUG ASLSession:458 - Getting security token: null in thread 28 +2016-04-08 19:54:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 28 +2016-04-08 19:54:24 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:54:24 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:54:24 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:54:24 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:54:24 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:54:24 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 19:54:24 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:54:24 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:54:24 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 19:54:24 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 19:54:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:54:24 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:54:24 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 19:54:24 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 17 ms +2016-04-08 19:54:24 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 19:54:24 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:54:24 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:54:24 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:54:24 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:54:24 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:54:24 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 19:54:24 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:54:24 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:54:24 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 19:54:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:54:24 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:54:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:54:24 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:54:24 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:54:24 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:54:24 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:54:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:54:24 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:54:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:54:24 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:54:24 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:54:24 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:54:24 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:54:24 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:54:24 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:54:24 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 19:54:24 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:54:24 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:54:24 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 19:54:24 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 19:54:24 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:54:24 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:54:24 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:54:24 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:54:24 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:54:24 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:54:24 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:54:24 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:54:24 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:54:24 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:54:24 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:54:24 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:54:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 19:54:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:54:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:54:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:54:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 28 +2016-04-08 19:54:24 DEBUG ASLSession:458 - Getting security token: null in thread 28 +2016-04-08 19:54:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:54:24 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:54:24 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:54:24 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:54:24 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:54:24 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:54:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:54:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:54:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:54:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:54:24 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:54:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:54:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 
19:54:25 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:54:25 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:54:25 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 30 ms +2016-04-08 19:54:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:54:25 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:54:25 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 19:54:25 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 19:54:25 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 19:54:25 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 19:54:25 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 19:54:25 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 19:54:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:54:25 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 19:54:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:54:25 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:54:25 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:54:25 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 19:54:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:54:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:54:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:54:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:54:25 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:54:25 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:54:25 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:54:25 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 19:54:25 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:54:25 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:54:25 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:54:25 INFO WorkspaceExplorerServiceImpl:142 - end time - 428 msc 0 sec +2016-04-08 19:54:25 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:55:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:55:09 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:56:06 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 19:56:06 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 19:56:06 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 19:56:06 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 19:56:06 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 19:56:06 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:56:06 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 19:56:06 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@5f4c82d7 +2016-04-08 19:56:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:56:06 INFO ASLSession:352 - Logging the entrance +2016-04-08 19:56:06 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 19:56:06 DEBUG TemplateModel:83 - 2016-04-08 19:56:06, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 19:56:06 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:56:06 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 19:56:09 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 19:56:09 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 19:56:09 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 19:56:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:56:09 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 19:56:09 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:56:09 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:56:10 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 134 ms +2016-04-08 
19:56:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 19:56:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 19:56:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 19:56:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 19:56:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 19:56:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 19:56:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 19:56:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 19:56:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 19:56:10 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 19:56:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 19:56:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 19:56:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 19:56:10 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 19:56:10 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 19:56:10 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:56:10 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 19:56:10 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@6fd257d4 +2016-04-08 19:56:10 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@5a31d727 +2016-04-08 19:56:10 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@32a43ba5 +2016-04-08 19:56:10 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@5e42bef1 +2016-04-08 19:56:10 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 98 ms +2016-04-08 19:56:10 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 19:56:10 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 19:56:10 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 19:56:10 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 19:56:10 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 19:56:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:56:10 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:56:10 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 19:56:10 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-08 19:56:10 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 19:56:10 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:56:10 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 19:56:10 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:56:11 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:56:11 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 19:56:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:56:14 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:56:14 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:56:14 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 19:56:14 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:56:14 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:56:15 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:56:15 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:56:15 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:56:15 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:56:15 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:56:15 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 19:56:15 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 19:56:15 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:56:15 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:56:15 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:56:15 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:56:15 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:56:15 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:56:15 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:56:15 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:56:15 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:56:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:56:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:56:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:56:15 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:56:15 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:56:15 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:56:15 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:56:15 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:56:15 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 19:56:15 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:56:15 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:56:15 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:56:15 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:56:15 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:56:15 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:56:15 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:56:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:56:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:56:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:56:15 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:56:15 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:56:15 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:56:15 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:56:15 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:56:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:56:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:56:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:56:15 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:56:15 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 19:56:15 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:56:15 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:56:15 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:56:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 19:56:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 19:56:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:56:15 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:56:15 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:56:15 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:56:15 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:56:15 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 19:56:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 19:56:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:56:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:56:15 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 19:56:15 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 19:56:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:56:15 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 19:56:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:56:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 19:56:15 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:56:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:56:15 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:56:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:56:15 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:56:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:56:15 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:56:15 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:56:15 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:56:15 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:56:15 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:56:15 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 19:56:15 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 19:56:15 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 19:56:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 19:56:15 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:56:15 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 19:56:15 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-08 19:56:15 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 19:56:15 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:56:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:56:15 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:56:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:56:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:56:15 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 19:56:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:56:15 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:56:15 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 19:56:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:56:15 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:56:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:56:15 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:56:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:56:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:56:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:56:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:56:15 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 19:56:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:56:15 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:56:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:56:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:56:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:56:15 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:56:15 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:56:15 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 19:56:16 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:56:16 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:56:16 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 19:56:16 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 19:56:16 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:56:16 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:56:16 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:56:16 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:56:16 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:56:16 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:56:16 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:56:16 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:56:16 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:56:16 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:56:16 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:56:16 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:56:16 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 19:56:16 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:56:16 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:56:16 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:56:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:56:16 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:56:16 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:56:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:56:16 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:56:16 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:56:16 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:56:16 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:56:16 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:56:16 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:56:16 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:56:16 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:56:16 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:56:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 19:56:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 
19:56:16 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:56:16 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:56:16 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 23 ms +2016-04-08 19:56:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 19:56:16 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:56:16 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 19:56:16 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 19:56:16 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 19:56:16 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 19:56:16 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 19:56:16 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 19:56:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 19:56:16 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 19:56:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 19:56:16 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:56:16 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:56:16 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 19:56:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 19:56:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 19:56:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 19:56:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 19:56:16 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:56:16 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:56:16 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:56:16 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 19:56:16 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:56:16 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:56:16 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:56:16 INFO WorkspaceExplorerServiceImpl:142 - end time - 400 msc 0 sec +2016-04-08 19:56:16 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:57:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:57:01 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:57:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:57:56 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:58:22 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 19:58:22 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 19:58:22 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 19:58:23 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 19:58:23 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 19:58:23 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 19:58:23 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 19:58:23 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@5bec7f18 +2016-04-08 19:58:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:58:23 INFO ASLSession:352 - Logging the entrance +2016-04-08 19:58:23 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 19:58:23 DEBUG TemplateModel:83 - 2016-04-08 19:58:23, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 19:58:23 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:58:23 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 19:58:26 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 19:58:27 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 19:58:27 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 19:58:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:58:27 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:58:27 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:58:27 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 19:58:27 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 137 ms +2016-04-08 
19:58:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 19:58:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 19:58:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 19:58:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 19:58:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 19:58:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 19:58:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 19:58:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 19:58:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 19:58:27 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 19:58:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 19:58:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 19:58:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 19:58:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 19:58:27 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 19:58:27 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:58:27 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 19:58:27 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@501162b0 +2016-04-08 19:58:27 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@4e7c2c74 +2016-04-08 19:58:27 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@6669ebea +2016-04-08 19:58:27 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@469e42ec +2016-04-08 19:58:27 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 116 ms +2016-04-08 19:58:27 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 19:58:27 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 19:58:27 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 19:58:27 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 19:58:27 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 19:58:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:58:27 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:58:27 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 19:58:27 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 24 ms +2016-04-08 19:58:27 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 19:58:27 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:58:27 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 19:58:27 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:58:28 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:58:28 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 19:58:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 19:58:31 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 19:58:31 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 19:58:31 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 19:58:31 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:58:31 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 19:58:32 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 19:58:32 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 19:58:32 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 19:58:32 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 19:58:32 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 19:58:32 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 19:58:32 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 19:58:32 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 19:58:32 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 19:58:32 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 19:58:32 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 19:58:32 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 19:58:32 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 19:58:32 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 19:58:32 DEBUG WPS2SM:201 - Schema: null +2016-04-08 19:58:32 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 19:58:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 19:58:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 19:58:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:58:32 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 19:58:32 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 19:58:32 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:58:32 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:58:32 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 19:58:32 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 19:58:32 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 19:58:32 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 19:58:32 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 19:58:32 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 19:58:32 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 19:58:32 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 19:58:32 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 19:58:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 19:58:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 19:58:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:58:32 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 19:58:32 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 19:58:32 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:58:32 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 19:58:32 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 19:58:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 19:58:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 19:58:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:58:32 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 19:58:32 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 19:58:32 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:58:32 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:58:32 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 19:58:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 19:58:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 19:58:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:58:32 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 19:58:32 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 19:58:32 DEBUG WPS2SM:93 - WPS type: +2016-04-08 19:58:32 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 19:58:32 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 19:58:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 19:58:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 19:58:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 19:58:32 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 19:58:32 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 19:58:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:58:32 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 19:58:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:58:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:58:32 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 19:58:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:58:32 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:58:32 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:58:32 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:58:32 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:58:32 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:58:32 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:58:32 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:58:32 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 19:58:32 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 19:58:32 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 19:58:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 19:58:32 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 19:58:32 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 19:58:32 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:58:32 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 19:58:32 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 17 ms +2016-04-08 19:58:32 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 19:58:32 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 31 +2016-04-08 19:58:32 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:58:32 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 19:58:32 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:58:32 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:58:32 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 19:58:32 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:58:32 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:58:32 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 19:58:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:58:33 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 19:58:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:58:33 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 19:58:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:58:33 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:58:33 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:58:33 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:58:33 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:58:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:58:33 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 19:58:33 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 19:58:33 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 19:58:33 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 19:58:33 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:58:33 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:58:33 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 19:58:33 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 19:58:33 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 19:58:33 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 19:58:33 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 19:58:33 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:58:33 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:58:33 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 19:58:33 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:58:33 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:58:33 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:58:33 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:58:33 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 19:58:33 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 19:58:33 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:58:33 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:58:33 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:58:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 19:58:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:58:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:58:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 19:58:33 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:58:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 19:58:33 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 19:58:33 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:58:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 19:58:33 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 19:58:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:58:33 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:58:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:58:33 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 19:58:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:58:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 19:58:33 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 19:58:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:58:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 
19:58:33 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:58:33 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:58:33 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 30 ms +2016-04-08 19:58:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:58:33 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:58:33 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 19:58:33 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 19:58:33 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 19:58:33 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 19:58:33 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 19:58:33 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 19:58:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:58:33 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 19:58:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:58:33 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 19:58:33 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 19:58:33 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 19:58:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:58:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:58:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 19:58:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 19:58:33 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 19:58:33 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 19:58:33 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 19:58:33 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 19:58:33 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 19:58:33 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 19:58:33 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 19:58:33 INFO WorkspaceExplorerServiceImpl:142 - end time - 438 msc 0 sec +2016-04-08 19:58:33 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 19:59:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 19:59:17 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:00:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:00:12 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:01:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:01:07 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:01:47 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 20:01:47 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 20:01:47 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-08 20:01:47 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 20:01:47 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 20:01:47 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:01:47 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 20:01:47 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@4c9f06ba +2016-04-08 20:01:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:01:47 INFO ASLSession:352 - Logging the entrance +2016-04-08 20:01:47 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 20:01:47 DEBUG TemplateModel:83 - 2016-04-08 20:01:47, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 20:01:47 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:01:47 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 20:01:50 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 20:01:50 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 20:01:50 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 20:01:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:01:50 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:01:50 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:01:50 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 20:01:51 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 129 ms +2016-04-08 20:01:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 20:01:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 20:01:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 20:01:51 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 20:01:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 20:01:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 20:01:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 20:01:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 20:01:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 20:01:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 20:01:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 20:01:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 20:01:51 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 20:01:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 20:01:51 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 20:01:51 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:01:51 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 20:01:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@705b29a4 +2016-04-08 20:01:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@a26cf9f +2016-04-08 20:01:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@1d4405ea +2016-04-08 20:01:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@79f3f08 +2016-04-08 20:01:51 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 116 ms +2016-04-08 20:01:51 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 20:01:51 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 20:01:51 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 20:01:51 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 20:01:51 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 20:01:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:01:51 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:01:51 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 20:01:51 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 33 ms +2016-04-08 20:01:51 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 20:01:51 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:01:51 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 20:01:51 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:01:52 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:01:52 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 20:01:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:01:55 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:01:55 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:01:55 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 20:01:55 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:01:55 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:01:55 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:01:55 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:01:55 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:01:55 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 20:01:55 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 20:01:55 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 20:01:55 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 20:01:55 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:01:55 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:01:55 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:01:55 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:01:55 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 20:01:55 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:01:55 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:01:55 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:01:55 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:01:55 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:01:55 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 20:01:55 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:01:55 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 20:01:55 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:01:55 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:01:55 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:01:55 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:01:55 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 20:01:55 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:01:55 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 20:01:55 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:01:55 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 20:01:55 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:01:55 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 20:01:55 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:01:55 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 20:01:55 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:01:55 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:01:55 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:01:55 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 20:01:55 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:01:55 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:01:55 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 20:01:55 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:01:55 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 20:01:55 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:01:55 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 20:01:55 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 20:01:55 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:01:55 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:01:55 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:01:55 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 20:01:55 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 20:01:55 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:01:55 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:01:55 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 20:01:55 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:01:55 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:01:55 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 20:01:55 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 20:01:55 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 20:01:55 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:01:55 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 20:01:55 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 20:01:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:01:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:01:56 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:01:56 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:01:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:01:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:01:56 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:01:56 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:01:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:01:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:01:56 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:01:56 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:01:56 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:01:56 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:01:56 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:01:56 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 20:01:56 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 20:01:56 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 20:01:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:01:56 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:01:56 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 20:01:56 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 19 ms +2016-04-08 20:01:56 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is 
null?false +2016-04-08 20:01:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:01:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:01:56 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:01:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:01:56 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 20:01:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:01:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:01:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:01:56 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 20:01:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:01:56 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 20:01:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:01:56 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:01:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:01:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:01:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:01:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:01:56 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:01:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:01:56 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:01:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:01:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:01:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:01:56 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:01:56 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:01:56 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 20:01:56 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:01:56 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:01:56 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 20:01:56 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 20:01:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:01:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:01:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:01:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:01:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:01:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:01:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:01:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:01:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:01:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:01:56 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:01:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:01:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 20:01:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:01:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:01:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 20:01:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:01:56 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:01:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:01:56 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:01:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:01:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:01:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:01:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:01:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:01:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:01:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:01:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:01:56 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:01:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 20:01:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 
20:01:57 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:01:57 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:01:57 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 91 ms +2016-04-08 20:01:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 20:01:57 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:01:57 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 20:01:57 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 18 ms +2016-04-08 20:01:57 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 20:01:57 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 20:01:57 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 20:01:57 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 20:01:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 20:01:57 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 20:01:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 20:01:57 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:01:57 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:01:57 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-08 20:01:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 20:01:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 20:01:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 20:01:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:01:57 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:01:57 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:01:57 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:01:57 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 20:01:57 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:01:57 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:01:57 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:01:57 INFO WorkspaceExplorerServiceImpl:142 - end time - 491 msc 0 sec +2016-04-08 20:01:57 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:02:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:02:42 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:03:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:03:37 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:04:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:04:32 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 20:05:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:05:27 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 20:06:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:06:22 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:07:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:07:17 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:08:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:08:12 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:09:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:09:07 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:10:02 DEBUG 
DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:10:02 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:10:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:10:57 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:11:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:11:52 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:12:59 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 20:12:59 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 20:12:59 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 20:12:59 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 20:12:59 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 20:12:59 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:12:59 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 20:12:59 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@2302a74d +2016-04-08 20:12:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:12:59 INFO ASLSession:352 - Logging the entrance +2016-04-08 20:12:59 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 20:12:59 DEBUG TemplateModel:83 - 2016-04-08 20:12:59, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 20:12:59 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:12:59 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 20:13:02 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 20:13:03 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 20:13:03 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 20:13:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:13:03 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:13:03 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:13:03 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 20:13:03 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 132 ms +2016-04-08 
20:13:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 20:13:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 20:13:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 20:13:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 20:13:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 20:13:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 20:13:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 20:13:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 20:13:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 20:13:03 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 20:13:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 20:13:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 20:13:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 20:13:03 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 20:13:03 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 20:13:03 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:13:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 20:13:03 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@bc24367 +2016-04-08 20:13:03 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@1fb2535c +2016-04-08 20:13:03 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@7ac5f438 +2016-04-08 20:13:03 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@7f5bbb70 +2016-04-08 20:13:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 104 ms +2016-04-08 20:13:03 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 20:13:03 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 20:13:03 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 20:13:03 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 20:13:03 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 20:13:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:13:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:13:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 20:13:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 23 ms +2016-04-08 20:13:03 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 20:13:03 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:13:03 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 20:13:03 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:13:04 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:13:04 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 20:13:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:13:07 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:13:07 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:13:07 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 20:13:07 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:13:07 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:13:07 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:13:07 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:13:07 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:13:07 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 20:13:07 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 20:13:07 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 20:13:07 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 20:13:07 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:13:07 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:13:07 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:13:07 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:13:07 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 20:13:07 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:13:07 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:13:07 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:13:07 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:13:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:13:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 20:13:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:13:07 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 20:13:07 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:13:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:13:07 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:13:07 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:13:07 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 20:13:07 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:13:07 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 20:13:07 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:13:07 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 20:13:07 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:13:07 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 20:13:07 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:13:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 20:13:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:13:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:13:07 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:13:07 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 20:13:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:13:07 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:13:07 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 20:13:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:13:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 20:13:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:13:07 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 20:13:07 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 20:13:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:13:07 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:13:07 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:13:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 20:13:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 20:13:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:13:07 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:13:07 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 20:13:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:13:07 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:13:07 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 20:13:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 20:13:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 20:13:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:13:07 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 20:13:07 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 20:13:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:13:07 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:13:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:13:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 20:13:07 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 20:13:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 20:13:07 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:13:07 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:13:07 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:13:07 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:13:07 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:13:07 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:13:07 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:13:07 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 20:13:07 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:13:07 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:13:07 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 20:13:07 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 20:13:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-08 20:13:08 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:13:08 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 20:13:08 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 19 ms +2016-04-08 20:13:08 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 20:13:08 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 30 +2016-04-08 20:13:08 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:13:08 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:13:08 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:13:08 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:13:08 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 20:13:08 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:13:08 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:13:08 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 20:13:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:13:08 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:13:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:13:08 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:13:08 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:13:08 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:13:08 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:13:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:13:08 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:13:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:13:08 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:13:08 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:13:08 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:13:08 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:13:08 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:13:08 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:13:08 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 20:13:08 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:13:08 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:13:08 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 20:13:08 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 20:13:08 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:13:08 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:13:08 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:13:08 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:13:08 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:13:08 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:13:08 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:13:08 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:13:08 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:13:08 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:13:08 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:13:08 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:13:08 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 20:13:08 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:13:08 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:13:08 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 20:13:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:13:08 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:13:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 20:13:08 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 20:13:08 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:13:08 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:13:08 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:13:08 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:13:08 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:13:08 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:13:08 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:13:08 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:13:08 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:13:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:13:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 
20:13:08 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:13:08 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:13:08 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 24 ms +2016-04-08 20:13:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:13:08 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:13:08 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 20:13:09 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-08 20:13:09 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 20:13:09 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 20:13:09 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 20:13:09 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 20:13:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:13:09 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 20:13:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:13:09 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:13:09 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:13:09 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 20:13:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:13:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:13:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:13:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:13:09 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:13:09 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:13:09 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:13:09 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 20:13:09 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:13:09 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:13:09 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:13:09 INFO WorkspaceExplorerServiceImpl:142 - end time - 472 msc 0 sec +2016-04-08 20:13:09 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:13:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:13:30 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:13:30 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:13:30 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-08 20:13:30 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:13:30 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:13:31 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:13:31 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:13:31 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:13:31 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 20:13:31 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 20:13:31 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-08 20:13:31 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-08 20:13:31 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-08 20:13:31 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-08 20:13:31 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:13:31 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:13:31 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:13:31 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:13:31 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 20:13:31 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:13:31 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:13:31 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:13:31 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:13:31 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:13:31 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 20:13:31 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:13:31 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 20:13:31 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:13:31 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:13:31 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:13:31 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:13:31 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 20:13:31 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:13:31 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 20:13:31 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:13:31 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 20:13:31 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:13:31 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 20:13:31 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:13:31 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 20:13:31 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:13:31 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:13:31 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:13:31 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 20:13:31 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:13:31 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:13:31 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 20:13:31 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:13:31 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 20:13:31 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:13:31 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 20:13:31 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-08 20:13:31 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:13:31 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:13:31 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-08 20:13:31 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-08 20:13:31 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-08 20:13:31 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:13:31 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-08 20:13:31 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-08 20:13:31 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:13:31 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:13:31 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:13:31 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-08 20:13:31 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-08 20:13:31 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:13:31 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:13:31 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-08 20:13:31 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:13:31 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:13:31 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-08 20:13:31 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-08 20:13:31 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-08 20:13:31 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:13:31 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-08 20:13:31 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 20:13:31 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:13:31 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:13:31 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 20:13:31 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-08 20:13:31 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 20:13:31 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:13:31 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 20:13:31 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 20:13:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:13:31 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:13:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:13:31 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:13:31 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:13:31 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:13:31 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:13:31 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:13:31 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:13:31 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:13:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:13:31 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:13:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:13:31 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:13:31 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:13:31 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:13:31 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:13:31 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:13:31 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:13:31 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:13:31 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 20:13:31 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:13:31 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:13:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:13:31 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:13:31 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:13:31 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:13:31 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:13:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:13:31 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:13:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:13:31 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:13:31 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:13:31 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:13:31 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:13:31 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:13:31 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:13:31 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:13:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 20:13:31 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 20:13:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 20:13:31 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:13:31 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:13:31 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:13:31 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:13:31 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:13:31 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:13:31 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:13:31 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:13:31 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:13:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 20:13:31 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 20:13:31 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:13:31 INFO JCRWorkspace:315 - Getting Workspace 
of user: giancarlo.panichi +2016-04-08 20:13:31 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:13:31 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:13:31 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:13:31 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:13:31 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:13:31 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:13:31 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:13:31 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:13:31 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:13:31 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:13:31 INFO WorkspaceExplorerServiceImpl:142 - end time - 193 msc 0 sec +2016-04-08 20:13:31 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:13:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:13:54 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:14:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:14:49 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:16:26 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 20:16:26 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 20:16:26 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-08 20:16:27 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 20:16:27 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 20:16:27 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:16:27 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 20:16:27 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@37825e2a +2016-04-08 20:16:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:16:27 INFO ASLSession:352 - Logging the entrance +2016-04-08 20:16:27 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 20:16:27 DEBUG TemplateModel:83 - 2016-04-08 20:16:27, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 20:16:27 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:16:27 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 20:16:30 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 20:16:30 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 20:16:30 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 20:16:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:16:30 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:16:30 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:16:30 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 20:16:30 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 123 ms +2016-04-08 20:16:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 20:16:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 20:16:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 20:16:30 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 20:16:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 20:16:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 20:16:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 20:16:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 20:16:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 20:16:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 20:16:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 20:16:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 20:16:30 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 20:16:30 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 20:16:30 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 20:16:31 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:16:31 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 20:16:31 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@754d24f8 +2016-04-08 20:16:31 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@614c05bf +2016-04-08 20:16:31 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@46421077 +2016-04-08 20:16:31 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@ce62ae5 +2016-04-08 20:16:31 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 98 ms +2016-04-08 20:16:31 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 20:16:31 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 20:16:31 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 20:16:31 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 20:16:31 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 20:16:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:16:31 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:16:31 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 20:16:31 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 25 ms +2016-04-08 20:16:31 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 20:16:31 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:16:31 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 20:16:31 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:16:32 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:16:32 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 20:16:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:16:35 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:16:35 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:16:35 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 20:16:35 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:16:35 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:16:35 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:16:35 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:16:35 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:16:35 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 20:16:35 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 20:16:35 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 20:16:35 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 20:16:35 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:16:35 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:16:35 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:16:35 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:16:35 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 20:16:35 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:16:35 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:16:35 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:16:35 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:16:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:16:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 20:16:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:16:35 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 20:16:35 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:16:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:16:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:16:35 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:16:35 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 20:16:35 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:16:35 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 20:16:35 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:16:35 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 20:16:35 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:16:35 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 20:16:35 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:16:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 20:16:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:16:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:16:35 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:16:35 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 20:16:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:16:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:16:35 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 20:16:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:16:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 20:16:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:16:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 20:16:35 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 20:16:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:16:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:16:35 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:16:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 20:16:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 20:16:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:16:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:16:35 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 20:16:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:16:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:16:35 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 20:16:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 20:16:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 20:16:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:16:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 20:16:35 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 20:16:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:16:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:16:35 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:16:35 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:16:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:16:35 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:16:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:16:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:16:35 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:16:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:16:35 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:16:35 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:16:35 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:16:35 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:16:35 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:16:35 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 20:16:35 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 20:16:35 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 20:16:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 20:16:35 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:16:35 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 20:16:35 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 21 ms +2016-04-08 20:16:35 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 20:16:35 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:16:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:16:35 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:16:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:16:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:16:35 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 20:16:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:16:35 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 20:16:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:16:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:16:36 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:16:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:16:36 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:16:36 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:16:36 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:16:36 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:16:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:16:36 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:16:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:16:36 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:16:36 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:16:36 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:16:36 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:16:36 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:16:36 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:16:36 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 20:16:36 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:16:36 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:16:36 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 20:16:36 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 20:16:36 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:16:36 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:16:36 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:16:36 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:16:36 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:16:36 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:16:36 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:16:36 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:16:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:16:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:16:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:16:36 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:16:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 20:16:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:16:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:16:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 20:16:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:16:36 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:16:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:16:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:16:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:16:36 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:16:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:16:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:16:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:16:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:16:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:16:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:16:36 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:16:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:16:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 
20:16:36 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:16:36 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:16:36 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 31 ms +2016-04-08 20:16:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:16:36 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:16:36 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 20:16:36 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 20:16:36 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 20:16:36 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 20:16:36 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 20:16:36 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 20:16:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:16:36 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 20:16:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:16:36 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:16:36 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:16:36 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 20:16:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:16:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:16:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:16:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:16:36 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:16:36 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:16:36 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:16:36 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 20:16:36 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:16:36 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:16:36 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:16:36 INFO WorkspaceExplorerServiceImpl:142 - end time - 409 msc 0 sec +2016-04-08 20:16:36 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:16:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 20:16:49 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 20:16:49 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:16:49 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-08 20:16:49 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:16:49 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:16:49 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:16:49 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:16:49 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:16:49 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 20:16:49 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 20:16:49 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-08 20:16:49 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-08 20:16:49 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-08 20:16:49 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-08 20:16:49 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:16:49 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:16:49 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:16:49 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:16:49 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 20:16:49 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:16:49 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:16:49 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:16:49 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:16:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:16:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 20:16:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:16:49 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 20:16:49 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:16:49 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:16:49 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:16:49 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:16:49 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 20:16:49 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:16:49 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 20:16:49 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:16:49 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 20:16:49 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:16:49 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 20:16:49 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:16:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 20:16:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:16:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:16:49 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:16:49 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 20:16:49 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:16:49 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:16:49 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 20:16:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:16:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 20:16:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:16:49 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 20:16:49 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-08 20:16:49 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:16:49 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:16:49 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-08 20:16:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-08 20:16:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-08 20:16:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:16:49 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-08 20:16:49 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-08 20:16:49 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:16:49 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:16:49 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:16:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-08 20:16:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-08 20:16:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:16:49 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:16:49 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-08 20:16:49 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:16:49 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:16:49 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-08 20:16:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-08 20:16:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-08 20:16:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:16:49 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-08 20:16:49 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 20:16:49 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:16:49 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:16:49 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 20:16:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-08 20:16:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 20:16:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:16:49 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 20:16:49 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 20:16:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:16:49 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:16:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:16:49 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:16:49 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:16:49 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:16:49 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:16:49 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:16:49 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:16:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:16:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:16:49 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:16:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:16:49 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:16:49 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:16:49 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:16:49 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:16:49 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:16:49 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:16:49 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:16:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 20:16:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:16:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:16:49 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:16:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:16:49 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:16:49 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:16:49 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:16:49 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:16:49 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:16:49 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:16:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:16:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:16:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:16:49 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:16:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:16:49 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:16:49 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:16:49 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:16:49 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:16:49 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:16:49 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:16:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:16:49 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:16:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:16:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:16:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:16:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath 
/Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:16:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:16:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:16:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:16:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:16:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:16:50 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:16:50 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:16:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:16:50 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:16:50 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:16:50 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:16:50 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:16:50 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:16:50 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:16:50 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:16:50 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:16:50 INFO WorkspaceExplorerServiceImpl:142 - end time - 204 msc 0 sec +2016-04-08 20:16:50 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:17:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:17:22 DEBUG ASLSession:458 - Getting security token: null in thread 34 
+2016-04-08 20:17:55 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 20:17:55 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 20:17:55 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 20:17:55 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 20:17:55 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 20:17:55 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:17:55 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 20:17:55 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@45192510 +2016-04-08 20:17:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:17:55 INFO ASLSession:352 - Logging the entrance +2016-04-08 20:17:55 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 20:17:55 DEBUG TemplateModel:83 - 2016-04-08 20:17:55, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 20:17:55 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:17:55 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 20:17:58 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 20:17:58 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 20:17:58 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 20:17:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:17:58 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:17:58 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:17:58 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 20:17:59 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 133 ms +2016-04-08 20:17:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 20:17:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 20:17:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 20:17:59 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 20:17:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 20:17:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 20:17:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 20:17:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 20:17:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 20:17:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 20:17:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 20:17:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 20:17:59 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 20:17:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 20:17:59 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 20:17:59 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:17:59 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 20:17:59 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@3f1d72ee +2016-04-08 20:17:59 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@1717dde0 +2016-04-08 20:17:59 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@2abb0401 +2016-04-08 20:17:59 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@660b8e05 +2016-04-08 20:17:59 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 169 ms +2016-04-08 20:17:59 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 20:17:59 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 20:17:59 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 20:17:59 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 20:17:59 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 20:17:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:17:59 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:17:59 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 20:17:59 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 30 ms +2016-04-08 20:17:59 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 20:17:59 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:17:59 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 20:17:59 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:18:00 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:18:00 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 20:18:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:18:02 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:18:02 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:18:02 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 20:18:02 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:18:02 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:18:03 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:18:03 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:18:03 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:18:03 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 20:18:03 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 20:18:03 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 20:18:03 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 20:18:03 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:18:03 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:18:03 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:18:03 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:18:03 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 20:18:03 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:18:03 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:18:03 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:18:03 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:18:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:18:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 20:18:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:18:03 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 20:18:03 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:18:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:18:03 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:18:03 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:18:03 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 20:18:03 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:18:03 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 20:18:03 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:18:03 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 20:18:03 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:18:03 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 20:18:03 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:18:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 20:18:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:18:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:18:03 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:18:03 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 20:18:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:18:03 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:18:03 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 20:18:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:18:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 20:18:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:18:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 20:18:03 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 20:18:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:18:03 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:18:03 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:18:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 20:18:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 20:18:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:18:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:18:03 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 20:18:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:18:03 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:18:03 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 20:18:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 20:18:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 20:18:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:18:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 20:18:03 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 20:18:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:18:03 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:18:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:18:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:18:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:18:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:18:03 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 20:18:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:18:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:18:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:18:03 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:18:03 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:18:03 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:18:03 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:18:03 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:18:03 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 20:18:03 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 20:18:03 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 20:18:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 20:18:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:18:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 20:18:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-08 20:18:03 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 20:18:03 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:18:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:18:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:18:03 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 20:18:03 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:18:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:18:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:18:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:18:03 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 20:18:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:18:03 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:18:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:18:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:18:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:18:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:18:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:18:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:18:03 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:18:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:18:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:18:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:18:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:18:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:18:03 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:18:04 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:18:04 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 20:18:04 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:18:04 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:18:04 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 20:18:04 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 20:18:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:18:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:18:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:18:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:18:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:18:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:18:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:18:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:18:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:18:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:18:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:18:04 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:18:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 20:18:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 20:18:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:18:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:18:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:18:04 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 20:18:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:18:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:18:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:18:04 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:18:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:18:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:18:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:18:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:18:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:18:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:18:04 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:18:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:18:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 
20:18:04 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:18:04 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:18:05 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 1045 ms +2016-04-08 20:18:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:18:05 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:18:05 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 20:18:05 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-08 20:18:05 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 20:18:05 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 20:18:05 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 20:18:05 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 20:18:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:18:05 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 20:18:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:18:05 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:18:05 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:18:05 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 20:18:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:18:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:18:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:18:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:18:05 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:18:05 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:18:05 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:18:05 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 20:18:05 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:18:05 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:18:05 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:18:05 INFO WorkspaceExplorerServiceImpl:142 - end time - 1423 msc 1 sec +2016-04-08 20:18:05 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:18:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:18:50 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 20:19:35 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 20:19:35 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 20:19:35 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 20:19:35 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 20:19:35 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 20:19:35 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:19:35 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 20:19:35 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@1d87bc69 +2016-04-08 20:19:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:19:35 INFO ASLSession:352 - Logging the entrance +2016-04-08 20:19:35 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 20:19:35 DEBUG TemplateModel:83 - 2016-04-08 20:19:35, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 20:19:35 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:19:35 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 20:19:38 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 20:19:38 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 20:19:38 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 20:19:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:19:38 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:19:38 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:19:38 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 20:19:38 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 127 ms +2016-04-08 
20:19:38 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 20:19:38 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 20:19:38 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 20:19:38 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 20:19:38 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 20:19:38 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 20:19:38 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 20:19:38 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 20:19:38 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 20:19:38 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 20:19:38 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 20:19:38 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 20:19:38 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 20:19:38 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 20:19:38 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 20:19:38 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:19:39 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 20:19:39 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@16a95272 +2016-04-08 20:19:39 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@5a6e08a4 +2016-04-08 20:19:39 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@4d15562 +2016-04-08 20:19:39 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@25206c3e +2016-04-08 20:19:39 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 102 ms +2016-04-08 20:19:39 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 20:19:39 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 20:19:39 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 20:19:39 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 20:19:39 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 20:19:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:19:39 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:19:39 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 20:19:39 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 21 ms +2016-04-08 20:19:39 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 20:19:39 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:19:39 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 20:19:39 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:19:39 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:19:39 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 20:19:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 20:19:42 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 20:19:42 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:19:42 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 20:19:42 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:19:42 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:19:42 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:19:42 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:19:42 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:19:42 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 20:19:42 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 20:19:42 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 20:19:42 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 20:19:42 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:19:42 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:19:42 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:19:42 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:19:42 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 20:19:42 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:19:42 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:19:42 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:19:42 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:19:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:19:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 20:19:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:19:42 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 20:19:42 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:19:42 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:19:42 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:19:42 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:19:42 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 20:19:42 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:19:42 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 20:19:42 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:19:42 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 20:19:42 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:19:42 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 20:19:42 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:19:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 20:19:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:19:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:19:42 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:19:42 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 20:19:42 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:19:42 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:19:42 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 20:19:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:19:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 20:19:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:19:42 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 20:19:42 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 20:19:42 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:19:42 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:19:42 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:19:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 20:19:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 20:19:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:19:42 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:19:42 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 20:19:42 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:19:42 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:19:42 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 20:19:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 20:19:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 20:19:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:19:42 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 20:19:42 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 20:19:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:19:42 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:19:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:19:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:19:42 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:19:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:19:42 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:19:42 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:19:42 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:19:42 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:19:42 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:19:42 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:19:42 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:19:42 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:19:42 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 20:19:42 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 20:19:42 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 20:19:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:19:42 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:19:42 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:19:42 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 20:19:42 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-08 20:19:43 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 20:19:43 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:19:43 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:19:43 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:19:43 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:19:43 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:19:43 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 20:19:43 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:19:43 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:19:43 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 20:19:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:19:43 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:19:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:19:43 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:19:43 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:19:43 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:19:43 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:19:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:19:43 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:19:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:19:43 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:19:43 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:19:43 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:19:43 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:19:43 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:19:43 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:19:43 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 20:19:43 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:19:43 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:19:43 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 20:19:43 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 20:19:43 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:19:43 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:19:43 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:19:43 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:19:43 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:19:43 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:19:43 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:19:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:19:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:19:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:19:43 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:19:43 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:19:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 20:19:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:19:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 20:19:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:19:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:19:43 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:19:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:19:43 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:19:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:19:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:19:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:19:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:19:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:19:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:19:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:19:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:19:43 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:19:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 20:19:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 
20:19:43 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:19:43 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:19:43 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 21 ms +2016-04-08 20:19:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 20:19:43 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:19:43 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 20:19:43 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 20:19:43 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 20:19:43 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 20:19:43 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 20:19:43 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 20:19:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 20:19:43 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 20:19:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 20:19:43 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:19:43 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:19:43 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 20:19:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 20:19:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 20:19:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 20:19:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:19:43 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:19:43 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:19:43 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:19:43 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 20:19:43 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:19:43 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:19:43 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:19:43 INFO WorkspaceExplorerServiceImpl:142 - end time - 400 msc 0 sec +2016-04-08 20:19:43 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:20:56 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 20:20:56 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 20:20:56 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 20:20:56 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 20:20:56 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 20:20:56 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:20:56 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 20:20:56 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@44d934e2 +2016-04-08 20:20:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:20:56 INFO ASLSession:352 - Logging the entrance +2016-04-08 20:20:56 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 20:20:56 DEBUG TemplateModel:83 - 2016-04-08 20:20:56, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 20:20:56 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:20:56 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 20:21:00 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 20:21:00 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 20:21:00 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 20:21:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:21:00 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:21:00 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:21:00 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 20:21:00 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 111 ms +2016-04-08 
20:21:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 20:21:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 20:21:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 20:21:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 20:21:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 20:21:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 20:21:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 20:21:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 20:21:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 20:21:01 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 20:21:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 20:21:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 20:21:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 20:21:01 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 20:21:01 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 20:21:01 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:21:01 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 20:21:01 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@6100cdfb +2016-04-08 20:21:01 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@2cecec49 +2016-04-08 20:21:01 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@7008383e +2016-04-08 20:21:01 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@44912150 +2016-04-08 20:21:01 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 101 ms +2016-04-08 20:21:01 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 20:21:01 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 20:21:01 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 20:21:01 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 20:21:01 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 20:21:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:21:01 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:21:01 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 20:21:01 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 28 ms +2016-04-08 20:21:01 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 20:21:01 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:21:01 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 20:21:01 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:21:02 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:21:02 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 20:21:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:21:05 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:21:05 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:21:05 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 20:21:05 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:21:05 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:21:06 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:21:06 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:21:06 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:21:06 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 20:21:06 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 20:21:06 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 20:21:06 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 20:21:06 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:21:06 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:21:06 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:21:06 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:21:06 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 20:21:06 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:21:06 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:21:06 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:21:06 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:21:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:21:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 20:21:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:21:06 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 20:21:06 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:21:06 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:21:06 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:21:06 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:21:06 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 20:21:06 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:21:06 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 20:21:06 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:21:06 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 20:21:06 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:21:06 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 20:21:06 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:21:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 20:21:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:21:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:21:06 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:21:06 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 20:21:06 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:21:06 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:21:06 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 20:21:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:21:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 20:21:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:21:06 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 20:21:06 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 20:21:06 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:21:06 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:21:06 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:21:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 20:21:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 20:21:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:21:06 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:21:06 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 20:21:06 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:21:06 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:21:06 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 20:21:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 20:21:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 20:21:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:21:06 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 20:21:06 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 20:21:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:21:06 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:21:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:21:06 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:21:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:21:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:21:06 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:21:06 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:21:06 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:21:06 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:21:06 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:21:06 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:21:06 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:21:06 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:21:06 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:21:06 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 20:21:06 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 20:21:06 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 20:21:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:21:06 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:21:06 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 20:21:06 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 17 ms +2016-04-08 20:21:06 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 20:21:06 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:21:06 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:21:06 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:21:06 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:21:06 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 20:21:06 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:21:06 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 20:21:06 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:21:06 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:21:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 20:21:06 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 20:21:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 20:21:06 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:21:06 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:21:06 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:21:06 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:21:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:21:06 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:21:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:21:06 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:21:06 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:21:06 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:21:06 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:21:06 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:21:06 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:21:06 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 20:21:06 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:21:06 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:21:06 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 20:21:06 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 20:21:06 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:21:06 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:21:06 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:21:06 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:21:06 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:21:06 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:21:06 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:21:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:21:06 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:21:06 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:21:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:21:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:21:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 20:21:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:21:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:21:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 20:21:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:21:07 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:21:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 20:21:07 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 20:21:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:21:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:21:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:21:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:21:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:21:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:21:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:21:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:21:07 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:21:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:21:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 
20:21:07 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:21:07 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:21:07 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 29 ms +2016-04-08 20:21:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:21:07 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:21:07 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 20:21:07 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 20:21:07 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 20:21:07 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 20:21:07 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 20:21:07 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 20:21:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:21:07 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 20:21:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:21:07 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:21:07 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:21:07 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-08 20:21:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:21:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:21:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:21:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:21:07 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:21:07 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:21:07 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:21:07 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 20:21:07 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:21:07 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:21:07 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:21:07 INFO WorkspaceExplorerServiceImpl:142 - end time - 425 msc 0 sec +2016-04-08 20:21:07 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:21:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:21:20 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:21:20 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:21:20 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-08 20:21:20 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:21:20 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:21:21 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:21:21 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:21:21 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:21:21 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 20:21:21 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 20:21:21 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-08 20:21:21 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-08 20:21:21 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-08 20:21:21 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-08 20:21:21 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:21:21 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:21:21 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:21:21 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:21:21 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 20:21:21 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:21:21 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:21:21 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:21:21 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:21:21 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:21:21 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 20:21:21 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:21:21 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 20:21:21 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:21:21 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:21:21 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:21:21 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:21:21 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 20:21:21 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:21:21 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 20:21:21 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:21:21 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 20:21:21 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:21:21 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 20:21:21 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:21:21 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 20:21:21 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:21:21 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:21:21 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:21:21 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 20:21:21 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:21:21 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:21:21 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 20:21:21 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:21:21 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 20:21:21 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:21:21 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 20:21:21 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-08 20:21:21 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:21:21 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:21:21 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-08 20:21:21 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-08 20:21:21 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-08 20:21:21 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:21:21 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-08 20:21:21 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-08 20:21:21 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:21:21 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:21:21 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:21:21 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-08 20:21:21 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-08 20:21:21 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:21:21 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:21:21 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-08 20:21:21 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:21:21 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:21:21 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-08 20:21:21 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-08 20:21:21 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-08 20:21:21 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:21:21 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-08 20:21:21 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 20:21:21 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:21:21 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:21:21 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 20:21:21 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-08 20:21:21 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 20:21:21 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:21:21 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 20:21:21 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 20:21:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:21:21 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:21:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:21:21 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:21:21 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:21:21 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:21:21 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:21:21 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:21:21 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:21:21 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:21:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:21:21 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:21:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:21:21 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:21:21 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:21:21 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:21:21 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:21:21 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:21:21 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:21:21 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:21:21 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:21:21 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 20:21:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:21:21 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:21:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:21:21 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:21:21 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:21:21 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:21:21 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:21:21 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:21:21 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:21:21 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:21:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:21:21 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:21:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 20:21:21 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 20:21:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 20:21:21 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:21:21 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:21:21 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:21:21 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:21:21 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:21:21 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:21:21 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:21:21 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:21:21 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:21:21 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:21:21 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:21:21 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:21:21 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:21:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 20:21:21 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 20:21:21 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:21:21 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:21:21 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:21:21 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:21:21 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 
20:21:21 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:21:21 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:21:21 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:21:21 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:21:21 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:21:21 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:21:21 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:21:21 INFO WorkspaceExplorerServiceImpl:142 - end time - 213 msc 0 sec +2016-04-08 20:21:21 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:21:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 20:21:51 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 20:22:36 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 20:22:36 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 20:22:36 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 20:22:36 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 20:22:36 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 20:22:36 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:22:36 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 20:22:36 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@7866fbcd +2016-04-08 20:22:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:22:36 INFO ASLSession:352 - Logging the entrance +2016-04-08 20:22:36 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 20:22:36 DEBUG TemplateModel:83 - 2016-04-08 20:22:36, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 20:22:36 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:22:36 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 20:22:39 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 20:22:39 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 20:22:39 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 20:22:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:22:39 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:22:39 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:22:39 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 20:22:39 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 129 ms +2016-04-08 
20:22:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 20:22:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 20:22:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 20:22:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 20:22:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 20:22:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 20:22:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 20:22:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 20:22:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 20:22:40 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 20:22:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 20:22:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 20:22:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 20:22:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 20:22:40 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 20:22:40 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:22:40 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 20:22:40 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@3c47e79c +2016-04-08 20:22:40 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@5d873166 +2016-04-08 20:22:40 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@401a0d22 +2016-04-08 20:22:40 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@42c5461c +2016-04-08 20:22:40 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 119 ms +2016-04-08 20:22:40 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 20:22:40 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 20:22:40 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 20:22:40 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 20:22:40 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 20:22:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:22:40 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:22:40 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 20:22:40 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 34 ms +2016-04-08 20:22:40 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 20:22:40 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:22:40 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 20:22:40 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:22:41 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:22:41 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 20:22:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:22:45 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:22:45 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:22:45 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 20:22:45 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:22:45 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:22:46 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:22:46 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:22:46 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:22:46 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 20:22:46 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 20:22:46 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 20:22:46 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 20:22:46 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:22:46 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:22:46 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:22:46 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:22:46 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 20:22:46 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:22:46 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:22:46 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:22:46 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:22:46 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:22:46 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 20:22:46 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:22:46 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 20:22:46 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:22:46 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:22:46 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:22:46 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:22:46 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 20:22:46 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:22:46 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 20:22:46 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:22:46 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 20:22:46 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:22:46 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 20:22:46 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:22:46 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 20:22:46 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:22:46 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:22:46 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:22:46 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 20:22:46 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:22:46 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:22:46 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 20:22:46 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:22:46 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 20:22:46 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:22:46 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 20:22:46 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 20:22:46 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:22:46 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:22:46 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:22:46 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 20:22:46 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 20:22:46 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:22:46 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:22:46 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 20:22:46 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:22:46 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:22:46 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 20:22:46 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 20:22:46 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 20:22:46 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:22:46 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 20:22:46 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 20:22:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:22:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:22:46 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:22:46 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 20:22:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:22:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:22:46 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:22:46 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:22:46 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:22:46 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:22:46 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:22:46 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:22:46 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:22:46 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:22:46 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:22:46 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 20:22:46 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 20:22:46 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 20:22:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:22:46 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:22:46 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 20:22:46 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 19 ms +2016-04-08 20:22:46 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is 
null?false +2016-04-08 20:22:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:22:46 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:22:46 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:22:46 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:22:46 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 20:22:46 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:22:46 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:22:46 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:22:46 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 20:22:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:22:46 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:22:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:22:46 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:22:46 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:22:46 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:22:46 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:22:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:22:46 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:22:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:22:46 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:22:46 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:22:46 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:22:46 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:22:46 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:22:47 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:22:47 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 20:22:47 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:22:47 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:22:47 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 20:22:47 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 20:22:47 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:22:47 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:22:47 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:22:47 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:22:47 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:22:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:22:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:22:47 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:22:47 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:22:47 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:22:47 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:22:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:22:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 20:22:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:22:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:22:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 20:22:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:22:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:22:47 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:22:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:22:47 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:22:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:22:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:22:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:22:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:22:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:22:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:22:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:22:47 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:22:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:22:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 
20:22:47 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:22:47 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:22:47 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 33 ms +2016-04-08 20:22:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:22:47 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:22:47 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 20:22:47 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 20:22:47 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 20:22:47 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 20:22:47 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 20:22:47 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 20:22:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:22:47 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 20:22:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:22:47 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:22:47 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:22:47 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 20:22:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:22:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:22:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:22:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:22:47 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:22:47 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:22:47 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:22:47 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 20:22:47 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:22:47 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:22:47 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:22:47 INFO WorkspaceExplorerServiceImpl:142 - end time - 427 msc 0 sec +2016-04-08 20:22:47 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:23:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:23:31 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:24:02 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 20:24:02 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 20:24:02 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 20:24:02 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 20:24:02 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 20:24:02 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:24:02 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 20:24:02 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@a7150b6 +2016-04-08 20:24:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:24:02 INFO ASLSession:352 - Logging the entrance +2016-04-08 20:24:02 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 20:24:02 DEBUG TemplateModel:83 - 2016-04-08 20:24:02, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 20:24:02 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:24:02 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 20:24:05 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 20:24:05 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 20:24:05 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 20:24:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:24:05 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:24:05 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:24:05 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 20:24:05 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 132 ms +2016-04-08 
20:24:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 20:24:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 20:24:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 20:24:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 20:24:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 20:24:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 20:24:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 20:24:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 20:24:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 20:24:05 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 20:24:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 20:24:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 20:24:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 20:24:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 20:24:05 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 20:24:05 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:24:05 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 20:24:05 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@6bc6f4c8 +2016-04-08 20:24:05 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@3387d243 +2016-04-08 20:24:05 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@27399762 +2016-04-08 20:24:05 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@4d527dce +2016-04-08 20:24:05 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 103 ms +2016-04-08 20:24:06 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 20:24:06 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 20:24:06 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 20:24:06 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 20:24:06 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 20:24:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:24:06 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:24:06 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 20:24:06 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 19 ms +2016-04-08 20:24:06 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 20:24:06 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:24:06 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 20:24:06 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:24:06 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:24:06 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 20:24:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:24:10 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:24:10 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:24:10 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 20:24:10 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:24:10 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:24:10 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:24:10 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:24:10 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:24:10 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 20:24:10 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 20:24:10 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 20:24:10 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 20:24:10 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:24:10 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:24:10 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:24:10 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:24:10 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 20:24:10 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:24:10 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:24:10 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:24:10 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:24:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:24:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 20:24:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:24:10 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 20:24:10 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:24:10 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:24:10 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:24:10 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:24:10 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 20:24:10 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:24:10 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 20:24:10 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:24:10 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 20:24:10 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:24:10 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 20:24:10 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:24:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 20:24:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:24:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:24:10 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:24:10 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 20:24:10 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:24:10 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:24:10 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 20:24:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:24:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 20:24:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:24:10 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 20:24:10 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 20:24:10 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:24:10 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:24:10 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:24:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 20:24:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 20:24:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:24:10 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:24:10 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 20:24:10 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:24:10 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:24:10 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 20:24:10 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 20:24:10 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 20:24:10 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:24:10 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 20:24:10 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 20:24:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 20:24:10 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-08 20:24:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-08 20:24:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:24:10 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:24:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:24:10 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:24:10 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:24:10 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:24:10 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:24:10 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:24:10 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:24:10 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:24:10 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:24:10 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:24:10 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 20:24:10 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 20:24:10 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 20:24:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 30 +2016-04-08 20:24:10 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:24:10 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 20:24:10 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 17 ms +2016-04-08 20:24:11 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 20:24:11 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 30 +2016-04-08 20:24:11 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:24:11 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:24:11 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 20:24:11 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:24:11 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 20:24:11 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:24:11 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:24:11 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:24:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:24:11 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:24:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:24:11 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:24:11 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:24:11 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:24:11 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:24:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:24:11 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:24:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:24:11 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:24:11 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:24:11 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:24:11 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:24:11 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:24:11 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:24:11 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 20:24:11 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:24:11 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:24:11 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 20:24:11 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 20:24:11 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:24:11 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:24:11 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:24:11 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:24:11 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:24:11 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:24:11 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:24:11 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:24:11 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:24:11 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:24:11 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:24:11 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:24:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by 
giancarlo.panichi +2016-04-08 20:24:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:24:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:24:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:24:11 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:24:11 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:24:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:24:11 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:24:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:24:11 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:24:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:24:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:24:11 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:24:11 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:24:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:24:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:24:11 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:24:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 20:24:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 
20:24:11 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:24:11 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:24:11 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 38 ms +2016-04-08 20:24:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 20:24:11 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:24:11 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 20:24:11 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 20:24:11 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 20:24:11 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 20:24:11 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 20:24:11 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 20:24:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 20:24:11 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 20:24:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 20:24:11 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:24:11 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:24:11 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-08 20:24:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 20:24:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 20:24:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 20:24:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:24:11 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:24:11 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:24:11 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:24:11 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 20:24:11 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:24:11 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:24:11 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:24:11 INFO WorkspaceExplorerServiceImpl:142 - end time - 409 msc 0 sec +2016-04-08 20:24:11 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:24:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:24:57 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:25:39 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 20:25:39 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 20:25:39 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-08 20:25:39 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 20:25:39 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 20:25:39 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:25:39 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 20:25:39 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@49be592d +2016-04-08 20:25:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:25:39 INFO ASLSession:352 - Logging the entrance +2016-04-08 20:25:39 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-08 20:25:39 DEBUG TemplateModel:83 - 2016-04-08 20:25:39, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 20:25:39 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:25:39 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 20:25:42 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 20:25:42 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-08 20:25:42 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 20:25:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:25:42 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:25:42 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:25:42 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 20:25:42 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 126 ms +2016-04-08 
20:25:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 20:25:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 20:25:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 20:25:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 20:25:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 20:25:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 20:25:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 20:25:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 20:25:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 20:25:42 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 20:25:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 20:25:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 20:25:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 20:25:42 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 20:25:42 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 20:25:43 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:25:43 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-08 20:25:43 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@708a1178 +2016-04-08 20:25:43 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@26d828b7 +2016-04-08 20:25:43 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@5a31d727 +2016-04-08 20:25:43 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@61666b97 +2016-04-08 20:25:43 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 104 ms +2016-04-08 20:25:43 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 20:25:43 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 20:25:43 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 20:25:43 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 20:25:43 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 20:25:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:25:43 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:25:43 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 20:25:43 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 26 ms +2016-04-08 20:25:43 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 20:25:43 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:25:43 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 20:25:43 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:25:44 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:25:44 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 20:25:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:25:47 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 20:25:47 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:25:47 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 20:25:47 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:25:47 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:25:47 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:25:47 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:25:47 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:25:47 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 20:25:47 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 20:25:47 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 20:25:47 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 20:25:47 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:25:47 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:25:47 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:25:47 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:25:47 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 20:25:47 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:25:47 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:25:47 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:25:47 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:25:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:25:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 20:25:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:25:47 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 20:25:47 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:25:47 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:25:47 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:25:47 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:25:47 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 20:25:47 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:25:47 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 20:25:47 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:25:47 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 20:25:47 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:25:47 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 20:25:47 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:25:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 20:25:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:25:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:25:47 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:25:47 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 20:25:47 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:25:47 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:25:47 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 20:25:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:25:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 20:25:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:25:47 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 20:25:47 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 20:25:47 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:25:47 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:25:47 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:25:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 20:25:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 20:25:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:25:47 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:25:47 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 20:25:47 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:25:47 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:25:47 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 20:25:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 20:25:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 20:25:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:25:47 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 20:25:47 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 20:25:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:25:47 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:25:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:25:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:25:47 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:25:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:25:47 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:25:47 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:25:47 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:25:47 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:25:47 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:25:47 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:25:47 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:25:47 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 20:25:47 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:25:47 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:25:47 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 20:25:47 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 20:25:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 20:25:47 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:25:47 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 20:25:47 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-08 20:25:48 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 20:25:48 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:25:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:25:48 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:25:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:25:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:25:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:25:48 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 20:25:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:25:48 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 20:25:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:25:48 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:25:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:25:48 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:25:48 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:25:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:25:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:25:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:25:48 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:25:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:25:48 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:25:48 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:25:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:25:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:25:48 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:25:48 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:25:48 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 20:25:48 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:25:48 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:25:48 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 20:25:48 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 20:25:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:25:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:25:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:25:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:25:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:25:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:25:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:25:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:25:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:25:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:25:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:25:48 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:25:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 20:25:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:25:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 20:25:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:25:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:25:48 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:25:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:25:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:25:48 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:25:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:25:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:25:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:25:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:25:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:25:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:25:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:25:48 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:25:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:25:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 
20:25:48 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:25:48 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:25:48 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 29 ms +2016-04-08 20:25:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:25:48 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:25:48 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 20:25:48 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 20:25:48 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 20:25:48 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 20:25:48 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 20:25:48 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 20:25:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:25:48 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 20:25:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:25:48 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:25:48 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:25:48 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 20:25:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:25:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:25:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-08 20:25:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:25:48 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:25:48 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:25:48 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:25:48 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 20:25:48 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:25:48 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:25:48 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:25:48 INFO WorkspaceExplorerServiceImpl:142 - end time - 434 msc 0 sec +2016-04-08 20:25:48 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:25:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:25:59 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:25:59 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:25:59 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-08 20:25:59 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:25:59 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:25:59 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:25:59 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:25:59 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:25:59 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 20:25:59 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 20:25:59 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-08 20:25:59 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-08 20:25:59 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-08 20:25:59 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-08 20:25:59 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:25:59 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:25:59 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:25:59 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:25:59 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 20:25:59 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:25:59 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:25:59 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:25:59 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:25:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:25:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 20:25:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:25:59 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 20:25:59 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:25:59 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:25:59 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:25:59 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:25:59 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 20:25:59 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:25:59 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 20:25:59 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:25:59 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 20:25:59 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:25:59 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 20:25:59 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:25:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 20:25:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:25:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:25:59 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:25:59 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 20:25:59 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:25:59 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:25:59 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 20:25:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:25:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 20:25:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:25:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 20:25:59 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-08 20:25:59 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:25:59 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:25:59 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-08 20:25:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-08 20:25:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-08 20:25:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:25:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-08 20:25:59 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-08 20:25:59 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:25:59 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:25:59 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:25:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-08 20:25:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-08 20:25:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:25:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:25:59 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-08 20:25:59 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:25:59 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:25:59 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-08 20:25:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-08 20:25:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-08 20:25:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:25:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-08 20:25:59 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 20:25:59 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:25:59 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:25:59 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 20:25:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-08 20:25:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 20:25:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:25:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 20:25:59 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 20:25:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:25:59 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:25:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:25:59 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:25:59 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:25:59 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:25:59 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:25:59 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:25:59 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:25:59 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:25:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:25:59 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:25:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:25:59 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:25:59 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:25:59 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:25:59 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:25:59 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:25:59 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:25:59 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:25:59 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:25:59 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 20:26:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:26:00 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:26:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:26:00 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:26:00 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:26:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:26:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:26:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:26:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:26:00 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 20:26:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:26:00 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:26:00 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:26:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:26:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:26:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:26:00 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:26:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:00 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:26:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:26:00 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:26:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:26:00 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 20:26:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:00 INFO JCRServlets:238 - Calling Servlet 
GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:00 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:26:00 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:26:00 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:26:00 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:26:00 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:26:00 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:26:00 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:26:00 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:26:00 INFO WorkspaceExplorerServiceImpl:142 - end time - 227 msc 0 sec +2016-04-08 20:26:00 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:26:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:26:03 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:26:03 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:26:03 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF +2016-04-08 20:26:03 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:26:03 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:26:04 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF + LOF + Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. 
an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed. + + + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + + + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + + + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + + + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + + + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. 
the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + + + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:26:04 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:26:04 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:26:04 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. 
column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + +2016-04-08 20:26:04 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + +2016-04-08 20:26:04 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + +2016-04-08 20:26:04 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + +2016-04-08 20:26:04 DEBUG SClient4WPS:290 - WPSClient->Input: + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + +2016-04-08 20:26:04 DEBUG SClient4WPS:290 - WPSClient->Input: + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + +2016-04-08 20:26:04 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:26:04 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:26:04 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:26:04 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:26:04 DEBUG WPS2SM:279 - Conversion to SM Type->PointsTable is a Complex Input +2016-04-08 20:26:04 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:26:04 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:26:04 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:26:04 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:26:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:26:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsTable +2016-04-08 20:26:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:04 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-08 20:26:04 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:26:04 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:04 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:26:04 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:26:04 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-08 20:26:04 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:26:04 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from PointsTable separated by | +2016-04-08 20:26:04 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:26:04 DEBUG WPS2SM:115 - Machter end: 93 +2016-04-08 20:26:04 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:26:04 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: PointsTable +2016-04-08 20:26:04 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:26:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from PointsTable separated by | ] +2016-04-08 20:26:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:26:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:04 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:26:04 DEBUG WPS2SM:254 - Conversion to SM Type->PointsClusterLabel is a Literal Input +2016-04-08 20:26:04 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:04 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:26:04 DEBUG WPS2SM:101 - Guessed default value: Cluster_ +2016-04-08 20:26:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:26:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsClusterLabel +2016-04-08 20:26:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:04 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT] +2016-04-08 20:26:04 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_lower_bound is a Literal Input +2016-04-08 20:26:04 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:04 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:26:04 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 20:26:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:locality (usually called k): minimal number of nearest neighbors +2016-04-08 20:26:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_lower_bound +2016-04-08 20:26:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:04 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. of Entries:1; Max N. 
of Entries:1; default:2], typology=OBJECT] +2016-04-08 20:26:04 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_upper_bound is a Literal Input +2016-04-08 20:26:04 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:04 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:26:04 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:26:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of nearest neighbors to take into account for outliers evaluation +2016-04-08 20:26:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_upper_bound +2016-04-08 20:26:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:04 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:26:04 DEBUG WPS2SM:254 - Conversion to SM Type->distance_function is a Literal Input +2016-04-08 20:26:04 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:04 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:26:04 DEBUG WPS2SM:101 - Guessed default value: euclidian distance +2016-04-08 20:26:04 DEBUG WPS2SM:265 - ValueType[]:[euclidian distance, squared distance, cosine distance, inverted cosine distance, angle] +2016-04-08 20:26:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the distance function to use in the calculation +2016-04-08 20:26:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:distance_function +2016-04-08 20:26:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:04 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. 
of Entries:1; default:euclidian distance], typology=ENUM] +2016-04-08 20:26:04 DEBUG WPS2SM:254 - Conversion to SM Type->lof_threshold is a Literal Input +2016-04-08 20:26:04 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:04 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:26:04 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 20:26:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the LOF score threshold over which the point is an outlier (usually 2) +2016-04-08 20:26:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:lof_threshold +2016-04-08 20:26:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:04 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 20:26:04 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. 
of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. of Entries:1; default:euclidian distance], typology=ENUM], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 20:26:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:26:04 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:26:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:26:04 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:26:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:26:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:26:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:26:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:26:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:26:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:26:04 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:26:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:26:04 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:26:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:26:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:26:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:26:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:26:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:26:04 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:26:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 20:26:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:26:04 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:26:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:26:04 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 20:26:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:26:04 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:26:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:26:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:26:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:26:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:26:04 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:26:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:26:04 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:26:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:26:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:26:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:26:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:26:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:26:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:26:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:26:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 
20:26:04 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:26:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:04 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:26:04 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:26:04 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:26:04 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:26:04 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:26:04 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:26:04 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:26:04 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:26:04 INFO WorkspaceExplorerServiceImpl:142 - end time - 182 msc 0 sec +2016-04-08 20:26:04 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:26:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:26:07 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:26:07 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:26:07 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS +2016-04-08 20:26:07 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:26:07 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:26:07 DEBUG SClient4WPS:284 - + 
org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS + XMEANS + A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + + + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + + + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. 
number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:26:07 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:26:07 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:26:07 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 20:26:07 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. 
table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 20:26:07 DEBUG SClient4WPS:290 - WPSClient->Input: + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + +2016-04-08 20:26:07 DEBUG SClient4WPS:290 - WPSClient->Input: + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + +2016-04-08 20:26:07 DEBUG SClient4WPS:290 - WPSClient->Input: + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + +2016-04-08 20:26:07 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-08 20:26:07 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:26:07 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:26:07 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:26:07 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:26:07 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 20:26:07 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:26:07 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:26:07 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:26:07 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:26:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:26:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 20:26:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:07 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-08 20:26:07 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:26:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:07 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:26:07 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:26:07 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-08 20:26:07 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:26:07 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 20:26:07 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:26:07 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 20:26:07 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:26:07 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 20:26:07 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:26:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 20:26:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:26:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:07 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:26:07 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 20:26:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:07 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:26:07 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 20:26:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:26:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 20:26:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:07 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 20:26:07 DEBUG WPS2SM:254 - Conversion to SM Type->maxIterations is a Literal Input +2016-04-08 20:26:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:07 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:26:07 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:26:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:XMeans max number of overall iterations of the clustering learning +2016-04-08 20:26:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxIterations +2016-04-08 20:26:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:07 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:26:07 DEBUG WPS2SM:254 - Conversion to SM Type->minClusters is a Literal Input +2016-04-08 20:26:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:07 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:26:07 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 20:26:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:minimum number of expected clusters +2016-04-08 20:26:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minClusters +2016-04-08 20:26:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:07 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 20:26:07 DEBUG WPS2SM:254 - Conversion to SM Type->maxClusters is a Literal Input +2016-04-08 20:26:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:07 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:26:07 DEBUG WPS2SM:101 - Guessed default value: 50 +2016-04-08 20:26:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of clusters to produce +2016-04-08 20:26:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxClusters +2016-04-08 20:26:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:07 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. 
of Entries:1; default:50], typology=OBJECT] +2016-04-08 20:26:07 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 20:26:07 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:07 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:26:07 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 20:26:07 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-08 20:26:07 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 20:26:07 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:07 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 20:26:07 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. of Entries:1; default:50], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 20:26:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:26:07 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:26:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:26:07 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:26:07 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:26:07 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:26:07 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:26:07 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:26:07 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:26:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:26:07 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:26:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:26:07 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:26:07 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:26:07 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:26:07 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:26:07 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:26:07 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:26:07 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:26:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 20:26:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:26:07 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:26:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:26:07 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:26:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:26:07 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:26:07 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:26:07 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:26:07 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:26:07 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:26:07 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:26:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:26:07 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:26:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:26:07 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:26:07 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:26:07 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:26:07 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:26:07 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:26:07 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:26:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:26:07 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:26:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:07 INFO JCRWorkspace:315 - Getting Workspace 
of user: giancarlo.panichi +2016-04-08 20:26:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:07 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:26:07 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:26:07 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:26:07 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:26:07 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:26:07 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:26:07 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:26:07 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:26:07 INFO WorkspaceExplorerServiceImpl:142 - end time - 192 msc 0 sec +2016-04-08 20:26:07 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:26:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:26:09 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:26:09 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:26:09 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF +2016-04-08 20:26:09 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:26:09 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:26:09 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF + LOF + Local Outlier Factor (LOF). 
A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed. + + + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + + + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + + + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + + + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + + + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. 
the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + + + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:26:09 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:26:09 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:26:09 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. 
column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + +2016-04-08 20:26:09 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + +2016-04-08 20:26:09 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + +2016-04-08 20:26:09 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + +2016-04-08 20:26:09 DEBUG SClient4WPS:290 - WPSClient->Input: + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + +2016-04-08 20:26:09 DEBUG SClient4WPS:290 - WPSClient->Input: + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + +2016-04-08 20:26:09 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:26:09 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:26:09 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:26:09 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:26:09 DEBUG WPS2SM:279 - Conversion to SM Type->PointsTable is a Complex Input +2016-04-08 20:26:09 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:26:09 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:26:09 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:26:09 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:26:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:26:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsTable +2016-04-08 20:26:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:09 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-08 20:26:09 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:26:09 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:09 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:26:09 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:26:09 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-08 20:26:09 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:26:09 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from PointsTable separated by | +2016-04-08 20:26:09 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:26:09 DEBUG WPS2SM:115 - Machter end: 93 +2016-04-08 20:26:09 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:26:09 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: PointsTable +2016-04-08 20:26:09 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:26:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from PointsTable separated by | ] +2016-04-08 20:26:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:26:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:09 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:26:09 DEBUG WPS2SM:254 - Conversion to SM Type->PointsClusterLabel is a Literal Input +2016-04-08 20:26:09 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:09 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:26:09 DEBUG WPS2SM:101 - Guessed default value: Cluster_ +2016-04-08 20:26:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:26:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsClusterLabel +2016-04-08 20:26:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:09 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT] +2016-04-08 20:26:09 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_lower_bound is a Literal Input +2016-04-08 20:26:09 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:09 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:26:09 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 20:26:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:locality (usually called k): minimal number of nearest neighbors +2016-04-08 20:26:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_lower_bound +2016-04-08 20:26:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:09 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. of Entries:1; Max N. 
of Entries:1; default:2], typology=OBJECT] +2016-04-08 20:26:09 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_upper_bound is a Literal Input +2016-04-08 20:26:09 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:09 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:26:09 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:26:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of nearest neighbors to take into account for outliers evaluation +2016-04-08 20:26:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_upper_bound +2016-04-08 20:26:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:09 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:26:09 DEBUG WPS2SM:254 - Conversion to SM Type->distance_function is a Literal Input +2016-04-08 20:26:09 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:09 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:26:09 DEBUG WPS2SM:101 - Guessed default value: euclidian distance +2016-04-08 20:26:09 DEBUG WPS2SM:265 - ValueType[]:[euclidian distance, squared distance, cosine distance, inverted cosine distance, angle] +2016-04-08 20:26:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the distance function to use in the calculation +2016-04-08 20:26:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:distance_function +2016-04-08 20:26:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:09 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. 
of Entries:1; default:euclidian distance], typology=ENUM] +2016-04-08 20:26:09 DEBUG WPS2SM:254 - Conversion to SM Type->lof_threshold is a Literal Input +2016-04-08 20:26:09 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:09 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:26:09 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 20:26:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the LOF score threshold over which the point is an outlier (usually 2) +2016-04-08 20:26:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:lof_threshold +2016-04-08 20:26:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:09 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 20:26:09 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. 
of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. of Entries:1; default:euclidian distance], typology=ENUM], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 20:26:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:26:09 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:26:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:26:09 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:26:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:26:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:26:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:26:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:26:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:26:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:26:09 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:26:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:26:09 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:26:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:26:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:26:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:26:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:26:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:26:09 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:26:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 20:26:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:26:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:09 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:26:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:26:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:26:09 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:26:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:26:09 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:26:09 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 20:26:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:26:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:26:09 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:26:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:26:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:26:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:26:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:26:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:26:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:26:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:26:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:26:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:26:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:26:09 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:26:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath 
/Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:09 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:26:09 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:26:09 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:26:09 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:26:09 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:26:09 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:26:09 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:26:09 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:26:09 INFO WorkspaceExplorerServiceImpl:142 - end time - 186 msc 0 sec +2016-04-08 20:26:09 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:26:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:26:34 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:26:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:26:54 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:26:54 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:26:54 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS +2016-04-08 20:26:54 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:26:54 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:26:54 DEBUG SClient4WPS:284 - + 
org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS + XMEANS + A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + + + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + + + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. 
number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:26:54 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:26:54 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:26:54 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 20:26:54 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. 
table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 20:26:54 DEBUG SClient4WPS:290 - WPSClient->Input: + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + +2016-04-08 20:26:54 DEBUG SClient4WPS:290 - WPSClient->Input: + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + +2016-04-08 20:26:54 DEBUG SClient4WPS:290 - WPSClient->Input: + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + +2016-04-08 20:26:54 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-08 20:26:54 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:26:54 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:26:54 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:26:54 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:26:54 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 20:26:54 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:26:54 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:26:54 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:26:54 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:26:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:26:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 20:26:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:54 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-08 20:26:54 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:26:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:54 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:26:54 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:26:54 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-08 20:26:54 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:26:54 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 20:26:54 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:26:54 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 20:26:54 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:26:54 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 20:26:54 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:26:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 20:26:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:26:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:54 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:26:54 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 20:26:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:54 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:26:54 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 20:26:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:26:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 20:26:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 20:26:54 DEBUG WPS2SM:254 - Conversion to SM Type->maxIterations is a Literal Input +2016-04-08 20:26:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:54 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:26:54 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:26:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:XMeans max number of overall iterations of the clustering learning +2016-04-08 20:26:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxIterations +2016-04-08 20:26:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:26:54 DEBUG WPS2SM:254 - Conversion to SM Type->minClusters is a Literal Input +2016-04-08 20:26:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:54 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:26:54 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 20:26:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:minimum number of expected clusters +2016-04-08 20:26:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minClusters +2016-04-08 20:26:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 20:26:54 DEBUG WPS2SM:254 - Conversion to SM Type->maxClusters is a Literal Input +2016-04-08 20:26:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:54 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:26:54 DEBUG WPS2SM:101 - Guessed default value: 50 +2016-04-08 20:26:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of clusters to produce +2016-04-08 20:26:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxClusters +2016-04-08 20:26:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. 
of Entries:1; default:50], typology=OBJECT] +2016-04-08 20:26:54 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 20:26:54 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:26:54 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:26:54 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 20:26:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-08 20:26:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 20:26:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:26:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 20:26:54 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. of Entries:1; default:50], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 20:26:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:26:55 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:26:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:26:55 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:26:55 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:26:55 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:26:55 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:26:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:26:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:26:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:26:55 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 20:26:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:26:55 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:26:55 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:26:55 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:26:55 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:26:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:26:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:26:55 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:26:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 20:26:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:26:55 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:26:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:26:55 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:26:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:26:55 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:26:55 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:26:55 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:26:55 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:26:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:26:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:26:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:26:55 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:26:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:26:55 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:26:55 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:26:55 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:26:55 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:26:55 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:26:55 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:26:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:55 DEBUG DefaultScopeProvider:38 - setting scope 
/gcube/devsec/devVRE in thread 33 +2016-04-08 20:26:55 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:26:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:55 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:26:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:55 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:26:55 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:26:55 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:26:55 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:26:55 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:26:55 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:26:55 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:26:55 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:26:55 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:26:55 INFO WorkspaceExplorerServiceImpl:142 - end time - 214 msc 0 sec +2016-04-08 20:26:55 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:27:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:27:29 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:28:15 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 20:28:15 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 20:28:15 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-08 20:28:15 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 20:28:15 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 20:28:15 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:28:15 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 20:28:15 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@5f10485a +2016-04-08 20:28:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:28:15 INFO ASLSession:352 - Logging the entrance +2016-04-08 20:28:15 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 20:28:15 DEBUG TemplateModel:83 - 2016-04-08 20:28:15, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 20:28:15 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:28:15 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 20:28:19 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 20:28:19 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 20:28:19 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 20:28:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:28:19 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:28:19 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:28:19 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 20:28:19 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 123 ms +2016-04-08 20:28:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 20:28:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 20:28:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 20:28:19 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 20:28:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 20:28:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 20:28:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 20:28:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 20:28:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 20:28:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 20:28:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 20:28:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 20:28:19 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 20:28:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 20:28:19 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 20:28:19 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:28:19 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 20:28:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@4de771dd +2016-04-08 20:28:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@75e4d712 +2016-04-08 20:28:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@7a54cce5 +2016-04-08 20:28:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@256e30a7 +2016-04-08 20:28:19 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 138 ms +2016-04-08 20:28:19 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 20:28:19 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 20:28:19 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 20:28:19 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 20:28:19 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 20:28:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:28:19 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:28:19 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 20:28:19 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 27 ms +2016-04-08 20:28:19 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 20:28:19 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:28:19 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 20:28:19 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:28:20 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:28:20 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 20:28:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:28:22 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:28:22 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:28:22 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 20:28:22 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:28:22 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:28:23 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:28:23 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:28:23 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:28:23 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 20:28:23 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 20:28:23 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 20:28:23 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 20:28:23 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:28:23 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:28:23 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:28:23 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:28:23 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 20:28:23 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:28:23 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:28:23 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:28:23 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:28:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:28:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 20:28:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:28:23 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 20:28:23 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:28:23 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:28:23 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:28:23 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:28:23 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 20:28:23 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:28:23 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 20:28:23 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:28:23 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 20:28:23 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:28:23 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 20:28:23 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:28:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 20:28:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:28:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:28:23 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:28:23 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 20:28:23 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:28:23 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:28:23 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 20:28:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:28:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 20:28:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:28:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 20:28:23 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 20:28:23 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:28:23 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:28:23 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:28:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 20:28:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 20:28:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:28:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:28:23 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 20:28:23 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:28:23 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:28:23 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 20:28:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 20:28:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 20:28:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:28:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 20:28:23 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 20:28:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:28:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:28:23 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:28:23 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:28:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:28:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:28:23 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:28:23 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:28:23 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:28:23 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:28:23 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:28:23 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:28:23 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:28:23 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:28:23 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:28:23 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 20:28:23 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 20:28:23 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 20:28:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-08 20:28:23 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:28:23 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 20:28:23 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 17 ms +2016-04-08 20:28:23 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 20:28:23 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:28:23 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:28:23 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:28:23 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:28:23 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 20:28:23 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:28:23 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:28:23 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:28:23 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 20:28:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:28:23 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:28:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:28:23 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:28:23 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:28:23 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:28:23 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:28:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:28:23 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 20:28:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:28:23 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:28:23 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:28:23 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:28:23 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:28:24 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:28:24 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:28:24 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 20:28:24 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:28:24 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:28:24 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 20:28:24 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 20:28:24 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:28:24 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:28:24 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:28:24 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:28:24 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:28:24 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:28:24 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:28:24 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:28:24 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:28:24 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:28:24 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:28:24 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:28:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-08 20:28:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:28:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:28:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 20:28:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:28:24 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:28:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:28:24 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:28:24 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:28:24 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:28:24 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:28:24 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:28:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:28:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:28:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:28:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:28:24 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:28:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 20:28:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 
20:28:24 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:28:24 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:28:24 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 29 ms +2016-04-08 20:28:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 20:28:24 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:28:24 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 20:28:24 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-08 20:28:24 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 20:28:24 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 20:28:24 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 20:28:24 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 20:28:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 20:28:24 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 20:28:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 20:28:24 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:28:24 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:28:24 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 21 ms +2016-04-08 20:28:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 20:28:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 20:28:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-08 20:28:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:28:24 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:28:24 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:28:24 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:28:24 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 20:28:24 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:28:24 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:28:24 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:28:24 INFO WorkspaceExplorerServiceImpl:142 - end time - 463 msc 0 sec +2016-04-08 20:28:24 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:28:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:28:31 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:28:31 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:28:31 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-08 20:28:31 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:28:31 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:28:32 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:28:32 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:28:32 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:28:32 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 20:28:32 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 20:28:32 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-08 20:28:32 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-08 20:28:32 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-08 20:28:32 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-08 20:28:32 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:28:32 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:28:32 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:28:32 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:28:32 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 20:28:32 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:28:32 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:28:32 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:28:32 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:28:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:28:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 20:28:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:28:32 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 20:28:32 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:28:32 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:28:32 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:28:32 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:28:32 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 20:28:32 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:28:32 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 20:28:32 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:28:32 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 20:28:32 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:28:32 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 20:28:32 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:28:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 20:28:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:28:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:28:32 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:28:32 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 20:28:32 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:28:32 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:28:32 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 20:28:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:28:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 20:28:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:28:32 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 20:28:32 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-08 20:28:32 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:28:32 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:28:32 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-08 20:28:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-08 20:28:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-08 20:28:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:28:32 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-08 20:28:32 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-08 20:28:32 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:28:32 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:28:32 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:28:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-08 20:28:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-08 20:28:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:28:32 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:28:32 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-08 20:28:32 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:28:32 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:28:32 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-08 20:28:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-08 20:28:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-08 20:28:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:28:32 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-08 20:28:32 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 20:28:32 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:28:32 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:28:32 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 20:28:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-08 20:28:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 20:28:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:28:32 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 20:28:32 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 20:28:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:28:32 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:28:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:28:32 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:28:32 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:28:32 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:28:32 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:28:32 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:28:32 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:28:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:28:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:28:32 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:28:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:28:32 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:28:32 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:28:32 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:28:32 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:28:32 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:28:32 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:28:32 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:28:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:28:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 20:28:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:28:32 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:28:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:28:32 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:28:32 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:28:32 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:28:32 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:28:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:28:32 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 20:28:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:28:32 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:28:32 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:28:32 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:28:32 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:28:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:28:32 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:28:32 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:28:32 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:28:32 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:28:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:28:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:28:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:28:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:28:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:28:32 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:28:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:28:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:28:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:28:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:28:32 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 20:28:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:28:32 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:28:32 INFO JCRServlets:238 - Calling Servlet 
GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:28:32 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:28:32 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:28:32 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:28:32 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:28:32 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:28:32 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:28:32 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:28:32 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:28:32 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:28:32 INFO WorkspaceExplorerServiceImpl:142 - end time - 208 msc 0 sec +2016-04-08 20:28:32 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:28:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:28:34 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:28:34 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:28:34 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF +2016-04-08 20:28:34 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:28:34 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:28:34 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF + LOF + Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. 
an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed. + + + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + + + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + + + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + + + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + + + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. 
the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + + + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:28:34 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:28:34 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:28:34 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. 
column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + +2016-04-08 20:28:34 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + +2016-04-08 20:28:34 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + +2016-04-08 20:28:34 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + +2016-04-08 20:28:34 DEBUG SClient4WPS:290 - WPSClient->Input: + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + +2016-04-08 20:28:34 DEBUG SClient4WPS:290 - WPSClient->Input: + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + +2016-04-08 20:28:34 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:28:34 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:28:34 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:28:34 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:28:34 DEBUG WPS2SM:279 - Conversion to SM Type->PointsTable is a Complex Input +2016-04-08 20:28:34 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:28:34 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:28:34 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:28:34 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:28:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:28:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsTable +2016-04-08 20:28:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:28:34 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-08 20:28:34 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:28:34 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:28:34 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:28:34 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:28:34 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-08 20:28:34 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:28:34 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from PointsTable separated by | +2016-04-08 20:28:34 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:28:34 DEBUG WPS2SM:115 - Machter end: 93 +2016-04-08 20:28:34 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:28:34 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: PointsTable +2016-04-08 20:28:34 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:28:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from PointsTable separated by | ] +2016-04-08 20:28:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:28:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:28:34 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:28:34 DEBUG WPS2SM:254 - Conversion to SM Type->PointsClusterLabel is a Literal Input +2016-04-08 20:28:34 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:28:34 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:28:34 DEBUG WPS2SM:101 - Guessed default value: Cluster_ +2016-04-08 20:28:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:28:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsClusterLabel +2016-04-08 20:28:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:28:34 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT] +2016-04-08 20:28:34 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_lower_bound is a Literal Input +2016-04-08 20:28:34 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:28:34 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:28:34 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 20:28:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:locality (usually called k): minimal number of nearest neighbors +2016-04-08 20:28:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_lower_bound +2016-04-08 20:28:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:28:34 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. of Entries:1; Max N. 
of Entries:1; default:2], typology=OBJECT] +2016-04-08 20:28:34 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_upper_bound is a Literal Input +2016-04-08 20:28:34 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:28:34 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:28:34 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:28:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of nearest neighbors to take into account for outliers evaluation +2016-04-08 20:28:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_upper_bound +2016-04-08 20:28:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:28:34 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:28:34 DEBUG WPS2SM:254 - Conversion to SM Type->distance_function is a Literal Input +2016-04-08 20:28:34 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:28:34 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:28:34 DEBUG WPS2SM:101 - Guessed default value: euclidian distance +2016-04-08 20:28:34 DEBUG WPS2SM:265 - ValueType[]:[euclidian distance, squared distance, cosine distance, inverted cosine distance, angle] +2016-04-08 20:28:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the distance function to use in the calculation +2016-04-08 20:28:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:distance_function +2016-04-08 20:28:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:28:34 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. 
of Entries:1; default:euclidian distance], typology=ENUM] +2016-04-08 20:28:34 DEBUG WPS2SM:254 - Conversion to SM Type->lof_threshold is a Literal Input +2016-04-08 20:28:34 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:28:34 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:28:34 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 20:28:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the LOF score threshold over which the point is an outlier (usually 2) +2016-04-08 20:28:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:lof_threshold +2016-04-08 20:28:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:28:34 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 20:28:34 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. 
of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. of Entries:1; default:euclidian distance], typology=ENUM], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 20:28:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:28:34 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:28:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:28:34 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:28:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:28:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:28:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:28:34 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:28:34 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:28:34 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:28:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:28:34 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:28:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:28:34 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:28:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:28:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:28:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:28:34 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:28:34 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:28:34 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:28:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:28:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 20:28:34 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:28:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:28:34 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:28:34 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:28:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:28:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:28:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:28:34 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:28:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:28:34 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:28:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:28:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:28:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:28:34 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:28:34 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:28:34 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:28:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:28:34 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 20:28:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:28:34 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:28:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:28:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:28:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:28:34 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:28:34 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:28:34 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:28:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:28:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:28:34 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:28:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:28:34 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 20:28:34 INFO JCRWorkspace:315 - Getting Workspace 
of user: giancarlo.panichi +2016-04-08 20:28:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:28:34 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:28:34 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:28:35 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:28:35 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:28:35 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:28:35 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:28:35 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:28:35 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:28:35 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:28:35 INFO WorkspaceExplorerServiceImpl:142 - end time - 181 msc 0 sec +2016-04-08 20:28:35 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:29:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:29:10 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:29:45 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 20:29:45 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 20:29:45 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-08 20:29:45 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 20:29:45 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 20:29:45 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:29:45 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 20:29:45 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@18d77d02 +2016-04-08 20:29:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:29:45 INFO ASLSession:352 - Logging the entrance +2016-04-08 20:29:45 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 20:29:45 DEBUG TemplateModel:83 - 2016-04-08 20:29:45, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 20:29:45 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:29:45 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 20:29:48 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 20:29:48 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 20:29:48 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 20:29:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:29:48 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:29:48 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:29:49 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 20:29:49 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 120 ms +2016-04-08 20:29:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 20:29:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 20:29:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 20:29:49 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 20:29:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 20:29:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 20:29:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 20:29:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 20:29:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 20:29:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 20:29:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 20:29:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 20:29:49 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 20:29:49 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 20:29:49 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 20:29:49 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:29:49 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 20:29:49 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@41251139 +2016-04-08 20:29:49 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@f88d42d +2016-04-08 20:29:49 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@376645b8 +2016-04-08 20:29:49 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@282a1656 +2016-04-08 20:29:49 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 109 ms +2016-04-08 20:29:49 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 20:29:49 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 20:29:49 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 20:29:49 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 20:29:49 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 20:29:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:29:49 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:29:49 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 20:29:49 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 26 ms +2016-04-08 20:29:49 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 20:29:49 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:29:49 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 20:29:49 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:29:50 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:29:50 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 20:29:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:29:53 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:29:53 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:29:53 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 20:29:53 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:29:53 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:29:53 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:29:53 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:29:53 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:29:53 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 20:29:53 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 20:29:53 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 20:29:53 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 20:29:53 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:29:53 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:29:53 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:29:53 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:29:53 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 20:29:53 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:29:53 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:29:53 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:29:53 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:29:53 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:29:53 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 20:29:53 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:29:53 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 20:29:53 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:29:53 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:29:53 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:29:53 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:29:53 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 20:29:53 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:29:53 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 20:29:53 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:29:53 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 20:29:53 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:29:53 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 20:29:53 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:29:53 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 20:29:53 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:29:53 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:29:53 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:29:53 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 20:29:53 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:29:53 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:29:53 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 20:29:53 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:29:53 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 20:29:53 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:29:53 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 20:29:53 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 20:29:53 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:29:53 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:29:53 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:29:53 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 20:29:53 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 20:29:53 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:29:53 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:29:53 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 20:29:53 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:29:53 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:29:53 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 20:29:53 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 20:29:53 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 20:29:53 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:29:53 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 20:29:53 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 20:29:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:29:54 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:29:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:29:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:29:54 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:29:54 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:29:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:29:54 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:29:54 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:29:54 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:29:54 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:29:54 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:29:54 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:29:54 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 20:29:54 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 20:29:54 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:29:54 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 20:29:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-08 20:29:54 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:29:54 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:29:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 20:29:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 17 ms +2016-04-08 20:29:54 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is 
null?false +2016-04-08 20:29:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:29:54 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:29:54 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:29:54 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 20:29:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:29:54 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 20:29:54 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:29:54 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:29:54 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:29:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:29:54 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 20:29:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:29:54 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:29:54 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:29:54 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:29:54 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:29:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:29:54 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:29:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:29:54 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:29:54 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:29:54 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:29:54 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:29:54 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:29:54 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:29:54 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 20:29:54 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:29:54 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:29:54 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 20:29:54 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 20:29:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:29:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:29:54 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:29:54 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:29:54 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:29:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:29:54 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:29:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:29:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:29:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:29:54 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:29:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:29:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by 
giancarlo.panichi +2016-04-08 20:29:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:29:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:29:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:29:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:29:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:29:54 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 20:29:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:29:54 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:29:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:29:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:29:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:29:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:29:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:29:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:29:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:29:54 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:29:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:29:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 
20:29:55 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:29:55 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:29:55 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 31 ms +2016-04-08 20:29:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:29:55 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:29:55 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 20:29:55 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-08 20:29:55 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 20:29:55 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 20:29:55 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 20:29:55 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 20:29:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:29:55 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 20:29:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:29:55 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:29:55 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:29:55 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 20:29:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:29:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:29:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-08 20:29:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:29:55 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:29:55 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:29:55 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:29:55 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 20:29:55 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:29:55 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:29:55 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:29:55 INFO WorkspaceExplorerServiceImpl:142 - end time - 446 msc 0 sec +2016-04-08 20:29:55 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:30:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-08 20:30:03 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-08 20:30:03 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:30:03 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-08 20:30:03 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:30:03 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:30:03 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:30:03 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:30:03 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:30:03 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 20:30:03 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 20:30:03 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-08 20:30:03 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-08 20:30:03 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-08 20:30:03 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-08 20:30:03 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:30:03 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:30:03 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:30:03 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:30:03 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 20:30:03 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:30:03 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:30:03 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:30:03 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:30:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:30:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 20:30:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:30:03 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 20:30:03 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:30:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:30:03 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:30:03 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:30:03 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 20:30:03 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:30:03 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 20:30:03 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:30:03 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 20:30:03 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:30:03 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 20:30:03 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:30:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 20:30:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:30:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:30:03 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:30:03 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 20:30:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:30:03 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:30:03 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 20:30:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:30:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 20:30:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:30:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 20:30:03 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-08 20:30:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:30:03 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:30:03 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-08 20:30:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-08 20:30:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-08 20:30:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:30:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-08 20:30:03 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-08 20:30:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:30:03 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:30:03 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:30:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-08 20:30:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-08 20:30:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:30:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:30:03 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-08 20:30:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:30:03 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:30:03 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-08 20:30:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-08 20:30:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-08 20:30:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:30:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-08 20:30:03 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 20:30:03 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:30:03 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:30:03 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-08 20:30:03 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-08 20:30:03 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 20:30:03 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:30:03 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-08 20:30:03 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-08 20:30:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:30:03 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 20:30:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:30:03 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:30:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:30:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:30:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:30:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:30:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:30:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:30:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:30:03 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:30:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:30:03 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:30:03 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:30:03 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:30:03 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:30:03 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:30:03 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:30:03 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:30:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:30:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 20:30:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:30:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:30:04 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:30:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:30:04 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:30:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:30:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:30:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:30:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:30:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:30:04 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:30:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:30:04 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:30:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:30:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:30:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:30:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:30:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:30:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:30:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:30:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:30:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-08 20:30:04 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-08 20:30:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:30:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:30:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:30:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:30:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:30:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:30:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:30:04 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:30:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:30:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:30:04 
INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:30:04 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:30:04 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:30:04 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:30:04 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:30:04 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:30:04 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:30:04 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:30:04 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:30:04 INFO WorkspaceExplorerServiceImpl:142 - end time - 203 msc 0 sec +2016-04-08 20:30:04 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:30:40 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-08 20:30:40 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-08 20:30:40 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-08 20:30:40 INFO SessionUtil:49 - no user found in session, use test user +2016-04-08 20:30:40 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-08 20:30:40 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:30:40 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-08 20:30:40 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@394f47f2 +2016-04-08 20:30:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:30:40 INFO ASLSession:352 - Logging the entrance +2016-04-08 20:30:40 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-08 20:30:40 DEBUG TemplateModel:83 - 2016-04-08 20:30:40, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-08 20:30:40 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:30:40 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-08 20:30:45 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-08 20:30:45 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-08 20:30:45 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-08 20:30:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 28 +2016-04-08 20:30:45 DEBUG ASLSession:458 - Getting security token: null in thread 28 +2016-04-08 20:30:45 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:30:45 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 20:30:45 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 123 ms +2016-04-08 20:30:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-08 20:30:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-08 20:30:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-08 20:30:45 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-08 20:30:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-08 20:30:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-08 20:30:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-08 20:30:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-08 20:30:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-08 20:30:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-08 20:30:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-08 20:30:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-08 20:30:45 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-08 20:30:45 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-08 20:30:45 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-08 20:30:45 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:30:45 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-08 20:30:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@1ceac987 +2016-04-08 20:30:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@6a4297f +2016-04-08 20:30:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@6b2a1485 +2016-04-08 20:30:45 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@20321066 +2016-04-08 20:30:45 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 97 ms +2016-04-08 20:30:45 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-08 20:30:45 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-08 20:30:45 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-08 20:30:45 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-08 20:30:45 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-08 20:30:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 28 +2016-04-08 20:30:45 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:30:45 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-08 20:30:45 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 21 ms +2016-04-08 20:30:45 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-08 20:30:45 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:30:45 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-08 20:30:45 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:30:46 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:30:46 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-08 20:30:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:30:51 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:30:51 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:30:51 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 20:30:51 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:30:51 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:30:51 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:30:51 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:30:51 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:30:51 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 20:30:51 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 20:30:51 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 20:30:51 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 20:30:51 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:30:51 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:30:51 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:30:51 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:30:51 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 20:30:51 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:30:51 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:30:51 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:30:51 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:30:51 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:30:51 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 20:30:51 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:30:51 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 20:30:51 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:30:51 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:30:51 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:30:51 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:30:51 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 20:30:51 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:30:51 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 20:30:51 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:30:51 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 20:30:51 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:30:51 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 20:30:51 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:30:51 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 20:30:51 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:30:51 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:30:51 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:30:51 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 20:30:51 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:30:51 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:30:51 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 20:30:51 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:30:51 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 20:30:51 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:30:51 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 20:30:51 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 20:30:51 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:30:51 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:30:51 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:30:51 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 20:30:51 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 20:30:51 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:30:51 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:30:51 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 20:30:51 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:30:51 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:30:51 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 20:30:51 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 20:30:51 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 20:30:51 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:30:51 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 20:30:51 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 20:30:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:30:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 28 +2016-04-08 20:30:51 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:30:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:30:51 DEBUG ASLSession:458 - Getting security token: null in thread 28 +2016-04-08 20:30:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 28 +2016-04-08 20:30:51 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:30:52 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:30:51 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:30:52 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:30:52 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:30:52 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:30:52 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-08 20:30:52 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-08 20:30:52 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:30:52 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-08 20:30:52 DEBUG JCRRepository:271 - Initialize repository +2016-04-08 20:30:52 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-08 20:30:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 28 +2016-04-08 20:30:52 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:30:52 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-08 20:30:52 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-08 20:30:52 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-08 20:30:52 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 28 +2016-04-08 20:30:52 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:30:52 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-08 20:30:52 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:30:52 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-08 20:30:52 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:30:52 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:30:52 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:30:52 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-08 20:30:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 20:30:52 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 20:30:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 20:30:52 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:30:52 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:30:52 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:30:52 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:30:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:30:52 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:30:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:30:52 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:30:52 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:30:52 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:30:52 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:30:52 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:30:52 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:30:52 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-08 20:30:52 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-08 20:30:52 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-08 20:30:52 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-08 20:30:52 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-08 20:30:52 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:30:52 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:30:52 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:30:52 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:30:52 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:30:52 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:30:52 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:30:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:30:52 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:30:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:30:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:30:52 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:30:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by 
giancarlo.panichi +2016-04-08 20:30:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:30:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:30:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:30:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-08 20:30:52 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-08 20:30:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:30:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:30:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 20:30:52 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 20:30:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:30:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:30:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:30:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:30:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:30:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:30:52 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:30:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 20:30:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 
20:30:53 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:30:53 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:30:53 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 33 ms +2016-04-08 20:30:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 20:30:53 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:30:53 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-08 20:30:53 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-08 20:30:53 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-08 20:30:53 INFO ISClientConnector:82 - found only one RR, take it +2016-04-08 20:30:53 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-08 20:30:53 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-08 20:30:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 20:30:53 DEBUG StorageClient:517 - set scope: /gcube +2016-04-08 20:30:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 20:30:53 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-08 20:30:53 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-08 20:30:53 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-08 20:30:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 20:30:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 20:30:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-08 20:30:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:30:53 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:30:53 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:30:53 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:30:53 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-08 20:30:53 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:30:53 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:30:53 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:30:53 INFO WorkspaceExplorerServiceImpl:142 - end time - 444 msc 0 sec +2016-04-08 20:30:53 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:31:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 28 +2016-04-08 20:31:35 DEBUG ASLSession:458 - Getting security token: null in thread 28 +2016-04-08 20:32:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:32:30 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:32:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 20:32:34 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 20:32:34 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:32:34 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING +2016-04-08 20:32:34 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:32:34 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 20:32:35 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING + MAX_ENT_NICHE_MODELLING + A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. 
In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt + + + OutputTableLabel + The name of the table to produce + Name of the parameter: OutputTableLabel. The name of the table to produce + + + + maxent_ + + + + SpeciesName + The name of the species to model and the occurrence records refer to + Name of the parameter: SpeciesName. The name of the species to model and the occurrence records refer to + + + + generic_species + + + + MaxIterations + The number of learning iterations of the MaxEnt algorithm + Name of the parameter: MaxIterations. The number of learning iterations of the MaxEnt algorithm + + + + 1000 + + + + DefaultPrevalence + A priori probability of presence at ordinary occurrence points + Name of the parameter: DefaultPrevalence. 
A priori probability of presence at ordinary occurrence points + + + + 0.5 + + + + OccurrencesTable + A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + Name of the parameter: OccurrencesTable. A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + LongitudeColumn + The column containing longitude values [the name of a column from OccurrencesTable] + Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrencesTable] + + + + decimallongitude + + + + LatitudeColumn + The column containing latitude values [the name of a column from OccurrencesTable] + Name of the parameter: LatitudeColumn. The column containing latitude values [the name of a column from OccurrencesTable] + + + + decimallatitude + + + + XResolution + Model projection resolution on the X axis in decimal degrees + Name of the parameter: XResolution. Model projection resolution on the X axis in decimal degrees + + + + 1 + + + + YResolution + Model projection resolution on the Y axis in decimal degrees + Name of the parameter: YResolution. Model projection resolution on the Y axis in decimal degrees + + + + 1 + + + + Layers + The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. 
Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + + + + + + + Z + Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + Name of the parameter: Z. Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + + + + 0 + + + + TimeIndex + Time Index. The default is the first time indexed in the input environmental datasets + Name of the parameter: TimeIndex. Time Index. The default is the first time indexed in the input environmental datasets + + + + 0 + + + + + + Best Threshold + Best threshold for transforming MaxEnt values into 0/1 probability assignments + Name of the parameter: Best Threshold. Best threshold for transforming MaxEnt values into 0/1 probability assignments + + + + + + Estimated Prevalence + The a posteriori estimated prevalence of the species + Name of the parameter: Estimated Prevalence. The a posteriori estimated prevalence of the species + + + + + + Variables contributions + The contribution of each variable to the MaxEnt values estimates + Name of the parameter: Variables contributions. 
The contribution of each variable to the MaxEnt values estimates + + + + + + Variables Permutations Importance + The importance of the permutations of the variables during the training + Name of the parameter: Variables Permutations Importance. The importance of the permutations of the variables during the training + + + + + + ASCII Maps of the environmental layers for checking features aligments + ASCII Maps of the environmental layers for checking features aligments + Name of the parameter: ASCII Maps of the environmental layers for checking features aligments. ASCII Maps of the environmental layers for checking features aligments + + + + application/d4science + + + + + application/d4science + + + + + + OutputTable7 + Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OutputTable7. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:32:35 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:32:35 DEBUG SClient4WPS:290 - WPSClient->Input: + OutputTableLabel + The name of the table to produce + Name of the parameter: OutputTableLabel. The name of the table to produce + + + + maxent_ + + +2016-04-08 20:32:35 DEBUG SClient4WPS:290 - WPSClient->Input: + SpeciesName + The name of the species to model and the occurrence records refer to + Name of the parameter: SpeciesName. 
The name of the species to model and the occurrence records refer to + + + + generic_species + + +2016-04-08 20:32:35 DEBUG SClient4WPS:290 - WPSClient->Input: + MaxIterations + The number of learning iterations of the MaxEnt algorithm + Name of the parameter: MaxIterations. The number of learning iterations of the MaxEnt algorithm + + + + 1000 + + +2016-04-08 20:32:35 DEBUG SClient4WPS:290 - WPSClient->Input: + DefaultPrevalence + A priori probability of presence at ordinary occurrence points + Name of the parameter: DefaultPrevalence. A priori probability of presence at ordinary occurrence points + + + + 0.5 + + +2016-04-08 20:32:35 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencesTable + A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + Name of the parameter: OccurrencesTable. A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:32:35 DEBUG SClient4WPS:290 - WPSClient->Input: + LongitudeColumn + The column containing longitude values [the name of a column from OccurrencesTable] + Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrencesTable] + + + + decimallongitude + + +2016-04-08 20:32:35 DEBUG SClient4WPS:290 - WPSClient->Input: + LatitudeColumn + The column containing latitude values [the name of a column from OccurrencesTable] + Name of the parameter: LatitudeColumn. 
The column containing latitude values [the name of a column from OccurrencesTable] + + + + decimallatitude + + +2016-04-08 20:32:35 DEBUG SClient4WPS:290 - WPSClient->Input: + XResolution + Model projection resolution on the X axis in decimal degrees + Name of the parameter: XResolution. Model projection resolution on the X axis in decimal degrees + + + + 1 + + +2016-04-08 20:32:35 DEBUG SClient4WPS:290 - WPSClient->Input: + YResolution + Model projection resolution on the Y axis in decimal degrees + Name of the parameter: YResolution. Model projection resolution on the Y axis in decimal degrees + + + + 1 + + +2016-04-08 20:32:35 DEBUG SClient4WPS:290 - WPSClient->Input: + Layers + The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + + + + + +2016-04-08 20:32:35 DEBUG SClient4WPS:290 - WPSClient->Input: + Z + Value of Z. 
Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + Name of the parameter: Z. Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + + + + 0 + + +2016-04-08 20:32:35 DEBUG SClient4WPS:290 - WPSClient->Input: + TimeIndex + Time Index. The default is the first time indexed in the input environmental datasets + Name of the parameter: TimeIndex. Time Index. The default is the first time indexed in the input environmental datasets + + + + 0 + + +2016-04-08 20:32:35 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:32:35 DEBUG SClient4WPS:297 - WPSClient->Output: + Best Threshold + Best threshold for transforming MaxEnt values into 0/1 probability assignments + Name of the parameter: Best Threshold. Best threshold for transforming MaxEnt values into 0/1 probability assignments + + + + +2016-04-08 20:32:35 DEBUG SClient4WPS:297 - WPSClient->Output: + Estimated Prevalence + The a posteriori estimated prevalence of the species + Name of the parameter: Estimated Prevalence. The a posteriori estimated prevalence of the species + + + + +2016-04-08 20:32:35 DEBUG SClient4WPS:297 - WPSClient->Output: + Variables contributions + The contribution of each variable to the MaxEnt values estimates + Name of the parameter: Variables contributions. The contribution of each variable to the MaxEnt values estimates + + + + +2016-04-08 20:32:35 DEBUG SClient4WPS:297 - WPSClient->Output: + Variables Permutations Importance + The importance of the permutations of the variables during the training + Name of the parameter: Variables Permutations Importance. 
The importance of the permutations of the variables during the training + + + + +2016-04-08 20:32:35 DEBUG SClient4WPS:297 - WPSClient->Output: + ASCII Maps of the environmental layers for checking features aligments + ASCII Maps of the environmental layers for checking features aligments + Name of the parameter: ASCII Maps of the environmental layers for checking features aligments. ASCII Maps of the environmental layers for checking features aligments + + + + application/d4science + + + + + application/d4science + + + + +2016-04-08 20:32:35 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable7 + Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OutputTable7. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:32:35 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:32:35 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:32:35 DEBUG WPS2SM:254 - Conversion to SM Type->OutputTableLabel is a Literal Input +2016-04-08 20:32:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:32:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:32:35 DEBUG WPS2SM:101 - Guessed default value: maxent_ +2016-04-08 20:32:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The name of the table to produce +2016-04-08 20:32:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OutputTableLabel +2016-04-08 20:32:35 DEBUG WPS2SM:292 - Conversion to SM 
Type->Number of Inputs to Manage:1 +2016-04-08 20:32:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=maxent_, value=null, name=OutputTableLabel, description=The name of the table to produce [Min N. of Entries:1; Max N. of Entries:1; default:maxent_], typology=OBJECT] +2016-04-08 20:32:35 DEBUG WPS2SM:254 - Conversion to SM Type->SpeciesName is a Literal Input +2016-04-08 20:32:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:32:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:32:35 DEBUG WPS2SM:101 - Guessed default value: generic_species +2016-04-08 20:32:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The name of the species to model and the occurrence records refer to +2016-04-08 20:32:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:SpeciesName +2016-04-08 20:32:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:32:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=generic_species, value=null, name=SpeciesName, description=The name of the species to model and the occurrence records refer to [Min N. of Entries:1; Max N. 
of Entries:1; default:generic_species], typology=OBJECT] +2016-04-08 20:32:35 DEBUG WPS2SM:254 - Conversion to SM Type->MaxIterations is a Literal Input +2016-04-08 20:32:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:32:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:32:35 DEBUG WPS2SM:101 - Guessed default value: 1000 +2016-04-08 20:32:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The number of learning iterations of the MaxEnt algorithm +2016-04-08 20:32:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:MaxIterations +2016-04-08 20:32:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:32:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1000, value=null, name=MaxIterations, description=The number of learning iterations of the MaxEnt algorithm [Min N. of Entries:1; Max N. of Entries:1; default:1000], typology=OBJECT] +2016-04-08 20:32:35 DEBUG WPS2SM:254 - Conversion to SM Type->DefaultPrevalence is a Literal Input +2016-04-08 20:32:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:32:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-08 20:32:35 DEBUG WPS2SM:101 - Guessed default value: 0.5 +2016-04-08 20:32:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:A priori probability of presence at ordinary occurrence points +2016-04-08 20:32:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:DefaultPrevalence +2016-04-08 20:32:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:32:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=0.5, value=null, name=DefaultPrevalence, description=A priori probability of presence at ordinary occurrence points [Min N. of Entries:1; Max N. 
of Entries:1; default:0.5], typology=OBJECT] +2016-04-08 20:32:35 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencesTable is a Complex Input +2016-04-08 20:32:35 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:32:35 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:32:35 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:32:35 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:32:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] +2016-04-08 20:32:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencesTable +2016-04-08 20:32:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:32:35 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencesTable, description=A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 20:32:35 DEBUG WPS2SM:254 - Conversion to SM Type->LongitudeColumn is a Literal Input +2016-04-08 20:32:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:32:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:32:35 DEBUG WPS2SM:101 - Guessed default value: decimallongitude +2016-04-08 20:32:35 DEBUG WPS2SM:130 - Machter title: The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallongitude] +2016-04-08 20:32:35 DEBUG WPS2SM:131 - Machter find: true +2016-04-08 20:32:35 DEBUG WPS2SM:132 - Machter group: the name of a column from OccurrencesTable +2016-04-08 20:32:35 DEBUG WPS2SM:133 - Machter start: 40 +2016-04-08 20:32:35 DEBUG WPS2SM:134 - Machter end: 82 +2016-04-08 20:32:35 DEBUG WPS2SM:135 - Machter Group Count: 1 +2016-04-08 20:32:35 DEBUG WPS2SM:137 - Matcher referredTabularParameterName: OccurrencesTable +2016-04-08 20:32:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The column containing longitude values [the name of a column from OccurrencesTable] +2016-04-08 20:32:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:LongitudeColumn +2016-04-08 20:32:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:32:35 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=LongitudeColumn, description=The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallongitude], typology=COLUMN] +2016-04-08 20:32:35 DEBUG WPS2SM:254 - Conversion to SM Type->LatitudeColumn is a Literal Input +2016-04-08 20:32:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:32:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:32:35 DEBUG WPS2SM:101 - Guessed default value: decimallatitude +2016-04-08 20:32:35 DEBUG WPS2SM:130 - Machter title: The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallatitude] +2016-04-08 20:32:35 DEBUG WPS2SM:131 - Machter find: true +2016-04-08 20:32:35 DEBUG WPS2SM:132 - Machter group: the name of a column from OccurrencesTable +2016-04-08 20:32:35 DEBUG WPS2SM:133 - Machter start: 39 +2016-04-08 20:32:35 DEBUG WPS2SM:134 - Machter end: 81 +2016-04-08 20:32:35 DEBUG WPS2SM:135 - Machter Group Count: 1 +2016-04-08 20:32:35 DEBUG WPS2SM:137 - Matcher referredTabularParameterName: OccurrencesTable +2016-04-08 20:32:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The column containing latitude values [the name of a column from OccurrencesTable] +2016-04-08 20:32:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:LatitudeColumn +2016-04-08 20:32:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:32:35 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=LatitudeColumn, description=The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallatitude], typology=COLUMN] +2016-04-08 20:32:35 DEBUG WPS2SM:254 - Conversion to SM Type->XResolution is a Literal Input +2016-04-08 20:32:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:32:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-08 20:32:35 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 20:32:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Model projection resolution on the X axis in decimal degrees +2016-04-08 20:32:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:XResolution +2016-04-08 20:32:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:32:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=XResolution, description=Model projection resolution on the X axis in decimal degrees [Min N. of Entries:1; Max N. 
of Entries:1; default:1], typology=OBJECT] +2016-04-08 20:32:35 DEBUG WPS2SM:254 - Conversion to SM Type->YResolution is a Literal Input +2016-04-08 20:32:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:32:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-08 20:32:35 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 20:32:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Model projection resolution on the Y axis in decimal degrees +2016-04-08 20:32:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:YResolution +2016-04-08 20:32:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:32:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=YResolution, description=Model projection resolution on the Y axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 20:32:35 DEBUG WPS2SM:254 - Conversion to SM Type->Layers is a Literal Input +2016-04-08 20:32:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:32:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:32:35 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:32:35 DEBUG WPS2SM:147 - Machter title: The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 20:32:35 DEBUG WPS2SM:148 - Machter find: true +2016-04-08 20:32:35 DEBUG WPS2SM:149 - Machter group: a sequence of values separated by | +2016-04-08 20:32:35 DEBUG WPS2SM:150 - Machter start: 501 +2016-04-08 20:32:35 DEBUG WPS2SM:151 - Machter end: 536 +2016-04-08 20:32:35 DEBUG WPS2SM:152 - Machter Group Count: 1 +2016-04-08 20:32:35 DEBUG WPS2SM:155 - Matcher separator: | +2016-04-08 20:32:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) +2016-04-08 20:32:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Layers +2016-04-08 20:32:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:32:35 DEBUG SClient4WPS:645 - InputParameter: ListParameter [type=java.lang.String, value=null, separator=|, name=Layers, description=The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. 
of Entries:1], typology=LIST] +2016-04-08 20:32:35 DEBUG WPS2SM:254 - Conversion to SM Type->Z is a Literal Input +2016-04-08 20:32:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:32:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-08 20:32:35 DEBUG WPS2SM:101 - Guessed default value: 0 +2016-04-08 20:32:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer +2016-04-08 20:32:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Z +2016-04-08 20:32:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:32:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=0, value=null, name=Z, description=Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT] +2016-04-08 20:32:35 DEBUG WPS2SM:254 - Conversion to SM Type->TimeIndex is a Literal Input +2016-04-08 20:32:35 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:32:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:32:35 DEBUG WPS2SM:101 - Guessed default value: 0 +2016-04-08 20:32:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Time Index. The default is the first time indexed in the input environmental datasets +2016-04-08 20:32:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:TimeIndex +2016-04-08 20:32:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:32:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex, description=Time Index. The default is the first time indexed in the input environmental datasets [Min N. of Entries:1; Max N. 
of Entries:1; default:0], typology=OBJECT] +2016-04-08 20:32:35 DEBUG SClient4WPS:649 - Parameters: [ObjectParameter [type=java.lang.String, defaultValue=maxent_, value=null, name=OutputTableLabel, description=The name of the table to produce [Min N. of Entries:1; Max N. of Entries:1; default:maxent_], typology=OBJECT], ObjectParameter [type=java.lang.String, defaultValue=generic_species, value=null, name=SpeciesName, description=The name of the species to model and the occurrence records refer to [Min N. of Entries:1; Max N. of Entries:1; default:generic_species], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1000, value=null, name=MaxIterations, description=The number of learning iterations of the MaxEnt algorithm [Min N. of Entries:1; Max N. of Entries:1; default:1000], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=0.5, value=null, name=DefaultPrevalence, description=A priori probability of presence at ordinary occurrence points [Min N. of Entries:1; Max N. of Entries:1; default:0.5], typology=OBJECT], TabularParameter [tableName= , templates=[], name=OccurrencesTable, description=A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=LongitudeColumn, description=The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallongitude], typology=COLUMN], Parameter [name=LatitudeColumn, description=The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallatitude], typology=COLUMN], ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=XResolution, description=Model projection resolution on the X axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=YResolution, description=Model projection resolution on the Y axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ListParameter [type=java.lang.String, value=null, separator=|, name=Layers, description=The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1], typology=LIST], ObjectParameter [type=java.lang.Double, defaultValue=0, value=null, name=Z, description=Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex, description=Time Index. The default is the first time indexed in the input environmental datasets [Min N. of Entries:1; Max N. 
of Entries:1; default:0], typology=OBJECT]] +2016-04-08 20:32:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 28 +2016-04-08 20:32:35 DEBUG ASLSession:458 - Getting security token: null in thread 28 +2016-04-08 20:32:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 28 +2016-04-08 20:32:35 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:32:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:32:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:32:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:32:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:32:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:32:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:32:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:32:35 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:32:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:32:35 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:32:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:32:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:32:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:32:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:32:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:32:35 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:32:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 20:32:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:32:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:32:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 28 +2016-04-08 20:32:35 DEBUG ASLSession:458 - Getting security token: null in thread 28 +2016-04-08 20:32:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:32:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:32:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:32:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:32:35 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:32:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:32:35 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:32:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:32:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:32:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:32:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:32:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:32:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:32:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:32:35 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:32:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:32:35 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:32:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:32:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:32:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:32:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:32:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:32:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:32:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:32:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:32:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:32:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:32:35 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:32:35 INFO JCRWorkspace:315 - Getting Workspace 
of user: giancarlo.panichi +2016-04-08 20:32:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:32:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:32:35 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:32:35 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:32:35 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:32:35 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:32:35 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:32:35 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:32:35 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:32:35 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:32:35 INFO WorkspaceExplorerServiceImpl:142 - end time - 230 msc 0 sec +2016-04-08 20:32:35 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-08 20:33:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 20:33:25 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 20:33:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-08 20:33:31 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-08 20:33:31 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-08 20:33:31 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-08 20:33:31 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:33:31 INFO StatWPSClientSession:84 - CONNECT +2016-04-08 
20:33:31 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-08 20:33:31 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-08 20:33:31 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-08 20:33:31 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-08 20:33:31 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-08 20:33:31 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-08 20:33:31 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-08 20:33:31 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-08 20:33:31 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-08 20:33:31 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-08 20:33:31 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-08 20:33:31 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-08 20:33:31 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-08 20:33:31 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-08 20:33:31 DEBUG WPS2SM:201 - Schema: null +2016-04-08 20:33:31 DEBUG WPS2SM:202 - Encoding: null +2016-04-08 20:33:31 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-08 20:33:31 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-08 20:33:31 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:33:31 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-08 20:33:31 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-08 20:33:31 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:33:31 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:33:31 DEBUG WPS2SM:101 - Guessed default value: +2016-04-08 20:33:31 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-08 20:33:31 DEBUG WPS2SM:112 - Machter find: true +2016-04-08 20:33:31 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-08 20:33:31 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-08 20:33:31 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-08 20:33:31 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-08 20:33:31 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-08 20:33:31 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-08 20:33:31 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-08 20:33:31 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-08 20:33:31 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:33:31 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-08 20:33:31 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-08 20:33:31 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:33:31 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-08 20:33:31 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-08 20:33:31 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-08 20:33:31 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-08 20:33:31 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:33:31 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-08 20:33:31 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-08 20:33:31 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:33:31 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:33:31 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-08 20:33:31 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-08 20:33:31 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-08 20:33:31 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:33:31 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-08 20:33:31 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-08 20:33:31 DEBUG WPS2SM:93 - WPS type: +2016-04-08 20:33:31 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-08 20:33:31 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-08 20:33:31 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-08 20:33:31 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-08 20:33:31 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-08 20:33:31 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-08 20:33:31 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-08 20:33:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:33:31 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:33:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:33:31 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:33:31 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:33:31 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:33:31 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:33:31 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:33:31 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:33:31 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:33:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 20:33:31 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-08 20:33:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-08 20:33:31 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:33:31 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:33:31 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:33:31 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:33:31 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:33:31 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:33:31 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-08 20:33:31 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-08 20:33:31 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:33:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-08 20:33:31 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-08 20:33:31 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:33:31 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:33:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:33:31 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-08 20:33:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-08 20:33:31 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:33:31 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:33:31 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:33:31 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:33:31 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:33:31 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:33:31 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:33:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 28 +2016-04-08 20:33:31 DEBUG ASLSession:458 - Getting security token: null in thread 28 +2016-04-08 20:33:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 28 +2016-04-08 20:33:31 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-08 20:33:31 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-08 20:33:31 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-08 20:33:31 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-08 20:33:31 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-08 20:33:31 DEBUG JCRHomeManager:97 - User is already logged +2016-04-08 20:33:31 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:33:31 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:33:31 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:33:31 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:33:31 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:33:31 DEBUG DefaultScopeProvider:38 - setting scope 
/gcube/devsec/devVRE in thread 28 +2016-04-08 20:33:31 DEBUG ASLSession:458 - Getting security token: null in thread 28 +2016-04-08 20:33:31 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:33:31 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-08 20:33:31 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:33:31 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-08 20:33:31 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-08 20:33:31 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-08 20:33:31 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-08 20:33:31 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-08 20:33:31 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:33:31 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-08 20:33:31 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-08 20:33:31 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-08 20:33:31 INFO WorkspaceExplorerServiceImpl:142 - end time - 220 msc 0 sec +2016-04-08 20:33:31 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 08:57:19 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-11 08:57:19 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-11 08:57:19 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-11 08:57:19 INFO SessionUtil:49 - no user found in session, use test user +2016-04-11 08:57:19 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-11 08:57:19 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 08:57:19 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-11 08:57:19 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@7fcd8f19 +2016-04-11 08:57:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 08:57:19 INFO ASLSession:352 - Logging the entrance +2016-04-11 08:57:19 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-11 08:57:19 DEBUG TemplateModel:83 - 2016-04-11 08:57:19, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-11 08:57:19 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 08:57:19 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-11 08:57:23 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-11 08:57:23 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-11 08:57:23 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-11 08:57:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 08:57:23 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 08:57:23 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 08:57:23 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 08:57:23 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 138 ms +2016-04-11 08:57:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-11 08:57:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-11 08:57:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-11 08:57:23 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-11 08:57:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-11 08:57:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-11 08:57:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-11 08:57:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-11 08:57:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-11 08:57:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-11 08:57:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-11 08:57:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-11 08:57:23 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-11 08:57:23 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-11 08:57:24 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-11 08:57:24 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 08:57:24 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 08:57:24 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@44f5e501 +2016-04-11 08:57:24 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@7e45329a +2016-04-11 08:57:24 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@12ca433c +2016-04-11 08:57:24 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@37fdd11 +2016-04-11 08:57:24 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 148 ms +2016-04-11 08:57:24 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-11 08:57:24 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-11 08:57:24 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-11 08:57:24 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-11 08:57:24 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-11 08:57:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 08:57:24 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 08:57:24 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-11 08:57:24 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 31 ms +2016-04-11 08:57:24 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-11 08:57:24 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 08:57:24 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-11 08:57:24 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 08:57:25 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 08:57:25 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-11 08:57:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 08:57:33 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 08:57:33 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 08:57:33 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-11 08:57:33 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 08:57:33 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 08:57:33 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 08:57:33 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 08:57:33 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 08:57:33 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 08:57:33 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 08:57:33 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-11 08:57:33 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-11 08:57:33 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 08:57:33 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 08:57:33 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 08:57:33 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 08:57:34 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 08:57:34 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 08:57:34 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 08:57:34 DEBUG WPS2SM:201 - Schema: null +2016-04-11 08:57:34 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 08:57:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 08:57:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 08:57:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:34 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 08:57:34 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 08:57:34 DEBUG WPS2SM:93 - WPS type: +2016-04-11 08:57:34 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 08:57:34 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 08:57:34 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 08:57:34 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 08:57:34 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 08:57:34 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 08:57:34 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 08:57:34 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 08:57:34 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 08:57:34 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 08:57:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 08:57:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 08:57:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:34 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 08:57:34 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 08:57:34 DEBUG WPS2SM:93 - WPS type: +2016-04-11 08:57:34 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 08:57:34 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 08:57:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 08:57:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 08:57:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:34 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 08:57:34 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-11 08:57:34 DEBUG WPS2SM:93 - WPS type: +2016-04-11 08:57:34 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 08:57:34 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 08:57:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-11 08:57:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-11 08:57:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:34 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 08:57:34 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 08:57:34 DEBUG WPS2SM:93 - WPS type: +2016-04-11 08:57:34 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 08:57:34 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 08:57:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-11 08:57:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 08:57:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:34 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 08:57:34 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-11 08:57:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 08:57:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-11 08:57:34 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 08:57:34 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-11 08:57:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-11 08:57:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 08:57:34 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 08:57:34 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 08:57:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 08:57:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 08:57:34 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 08:57:34 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 08:57:34 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 08:57:34 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 08:57:34 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 08:57:34 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-11 08:57:34 DEBUG JCRRepository:271 - Initialize repository +2016-04-11 08:57:34 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-11 08:57:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 29 +2016-04-11 08:57:34 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 08:57:34 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-11 08:57:34 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 20 ms +2016-04-11 08:57:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 
08:57:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 08:57:34 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 08:57:34 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 08:57:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 08:57:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 08:57:34 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 08:57:34 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 08:57:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 08:57:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 08:57:34 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 08:57:34 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 08:57:34 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 08:57:34 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 08:57:34 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-11 08:57:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-11 08:57:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 08:57:34 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 08:57:34 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 08:57:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 08:57:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 08:57:34 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. 
+2016-04-11 08:57:34 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 08:57:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 08:57:34 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 08:57:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 08:57:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 08:57:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 08:57:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 08:57:34 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-11 08:57:35 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 08:57:35 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 08:57:35 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-11 08:57:35 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 08:57:35 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 08:57:35 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-11 08:57:35 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-11 08:57:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 08:57:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 08:57:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 08:57:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 08:57:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 08:57:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 08:57:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 08:57:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:35 INFO JCRWorkspace:2549 - 
getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 08:57:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 08:57:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 08:57:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 08:57:35 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 08:57:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-11 08:57:35 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-11 08:57:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath 
/Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:35 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 08:57:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 08:57:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 08:57:35 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 08:57:35 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 08:57:35 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 20 ms +2016-04-11 08:57:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 08:57:35 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 08:57:35 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-11 08:57:35 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in 
collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-11 08:57:35 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-11 08:57:35 INFO ISClientConnector:82 - found only one RR, take it +2016-04-11 08:57:35 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-11 08:57:35 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-11 08:57:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 08:57:35 DEBUG StorageClient:517 - set scope: /gcube +2016-04-11 08:57:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 08:57:35 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 08:57:35 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 08:57:35 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-11 08:57:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 08:57:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 08:57:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 08:57:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in 
thread 33 +2016-04-11 08:57:35 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 08:57:35 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 08:57:35 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 08:57:35 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 08:57:35 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 08:57:35 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 08:57:35 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 08:57:35 INFO WorkspaceExplorerServiceImpl:142 - end time - 469 msc 0 sec +2016-04-11 08:57:35 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 08:57:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 08:57:40 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 08:57:40 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 08:57:40 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-11 08:57:40 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 08:57:40 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 08:57:40 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 08:57:40 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 08:57:40 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 08:57:40 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 08:57:40 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 08:57:40 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-11 08:57:40 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-11 08:57:40 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-11 08:57:40 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-11 08:57:40 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 08:57:40 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 08:57:40 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 08:57:40 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 08:57:40 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 08:57:40 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 08:57:40 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 08:57:40 DEBUG WPS2SM:201 - Schema: null +2016-04-11 08:57:40 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 08:57:40 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 08:57:40 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 08:57:40 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:40 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 08:57:40 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 08:57:40 DEBUG WPS2SM:93 - WPS type: +2016-04-11 08:57:40 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 08:57:40 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 08:57:40 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 08:57:40 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 08:57:40 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 08:57:40 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 08:57:40 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 08:57:40 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 08:57:40 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 08:57:40 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 08:57:40 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 08:57:40 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 08:57:40 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:40 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 08:57:40 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 08:57:40 DEBUG WPS2SM:93 - WPS type: +2016-04-11 08:57:40 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 08:57:40 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 08:57:40 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 08:57:40 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 08:57:40 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:40 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 08:57:40 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-11 08:57:40 DEBUG WPS2SM:93 - WPS type: +2016-04-11 08:57:40 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 08:57:40 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-11 08:57:40 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-11 08:57:40 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-11 08:57:40 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:40 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-11 08:57:40 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-11 08:57:40 DEBUG WPS2SM:93 - WPS type: +2016-04-11 08:57:40 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 08:57:40 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 08:57:40 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-11 08:57:40 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-11 08:57:40 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:40 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 08:57:40 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-11 08:57:40 DEBUG WPS2SM:93 - WPS type: +2016-04-11 08:57:40 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 08:57:40 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-11 08:57:40 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-11 08:57:40 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-11 08:57:40 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:40 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-11 08:57:40 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 08:57:40 DEBUG WPS2SM:93 - WPS type: +2016-04-11 08:57:40 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 08:57:40 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 08:57:40 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-11 08:57:40 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 08:57:40 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:40 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 08:57:40 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 08:57:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 08:57:40 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 08:57:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 08:57:40 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 08:57:40 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 08:57:40 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 08:57:40 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 08:57:40 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 08:57:40 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 08:57:40 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 08:57:40 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 08:57:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 08:57:40 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 08:57:40 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 08:57:40 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 08:57:40 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 08:57:40 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 08:57:40 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 08:57:40 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 08:57:40 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:40 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 08:57:40 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 08:57:40 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 08:57:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-11 08:57:40 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-11 08:57:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-11 08:57:40 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 08:57:40 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 08:57:40 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 08:57:40 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 08:57:40 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 08:57:40 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 08:57:40 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:40 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 08:57:40 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 08:57:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 08:57:40 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 08:57:40 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 08:57:40 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 08:57:40 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 08:57:40 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 08:57:40 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 08:57:40 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:40 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by 
giancarlo.panichi +2016-04-11 08:57:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 08:57:41 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 08:57:41 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:41 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:41 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 08:57:41 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 08:57:41 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 08:57:41 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 08:57:41 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 08:57:41 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 08:57:41 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 08:57:41 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 08:57:41 INFO WorkspaceExplorerServiceImpl:142 - end time - 214 msc 0 sec +2016-04-11 08:57:41 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 08:57:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 08:57:48 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 08:57:48 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 08:57:48 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF +2016-04-11 08:57:48 DEBUG SClient4WPS:263 - 
Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 08:57:48 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 08:57:48 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF + LOF + Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed. + + + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + + + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + + + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + + + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. 
maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + + + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + + + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 08:57:48 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 08:57:48 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 08:57:48 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + +2016-04-11 08:57:48 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + +2016-04-11 08:57:48 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + +2016-04-11 08:57:48 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + +2016-04-11 08:57:48 DEBUG SClient4WPS:290 - WPSClient->Input: + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + +2016-04-11 08:57:48 DEBUG SClient4WPS:290 - WPSClient->Input: + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. 
the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + +2016-04-11 08:57:48 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 08:57:48 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 08:57:48 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 08:57:48 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 08:57:48 DEBUG WPS2SM:279 - Conversion to SM Type->PointsTable is a Complex Input +2016-04-11 08:57:48 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 08:57:48 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 08:57:48 DEBUG WPS2SM:201 - Schema: null +2016-04-11 08:57:48 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 08:57:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 08:57:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsTable +2016-04-11 08:57:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:48 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 08:57:48 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 08:57:48 DEBUG WPS2SM:93 - WPS type: +2016-04-11 08:57:48 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 08:57:48 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 08:57:48 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 08:57:48 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 08:57:48 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from PointsTable separated by | +2016-04-11 08:57:48 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 08:57:48 DEBUG WPS2SM:115 - Machter end: 93 +2016-04-11 08:57:48 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 08:57:48 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: PointsTable +2016-04-11 08:57:48 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 08:57:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from PointsTable separated by | ] +2016-04-11 08:57:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 08:57:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:48 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 08:57:48 DEBUG WPS2SM:254 - Conversion to SM Type->PointsClusterLabel is a Literal Input +2016-04-11 08:57:48 DEBUG WPS2SM:93 - WPS type: +2016-04-11 08:57:48 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 08:57:48 DEBUG WPS2SM:101 - Guessed default value: Cluster_ +2016-04-11 08:57:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 08:57:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsClusterLabel +2016-04-11 08:57:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT] +2016-04-11 08:57:48 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_lower_bound is a Literal Input +2016-04-11 08:57:48 DEBUG WPS2SM:93 - WPS type: +2016-04-11 08:57:48 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 08:57:48 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 08:57:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:locality (usually called k): minimal number of nearest neighbors +2016-04-11 08:57:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_lower_bound +2016-04-11 08:57:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. of Entries:1; Max N. 
of Entries:1; default:2], typology=OBJECT] +2016-04-11 08:57:48 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_upper_bound is a Literal Input +2016-04-11 08:57:48 DEBUG WPS2SM:93 - WPS type: +2016-04-11 08:57:48 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 08:57:48 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 08:57:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of nearest neighbors to take into account for outliers evaluation +2016-04-11 08:57:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_upper_bound +2016-04-11 08:57:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 08:57:48 DEBUG WPS2SM:254 - Conversion to SM Type->distance_function is a Literal Input +2016-04-11 08:57:48 DEBUG WPS2SM:93 - WPS type: +2016-04-11 08:57:48 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 08:57:48 DEBUG WPS2SM:101 - Guessed default value: euclidian distance +2016-04-11 08:57:48 DEBUG WPS2SM:265 - ValueType[]:[euclidian distance, squared distance, cosine distance, inverted cosine distance, angle] +2016-04-11 08:57:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the distance function to use in the calculation +2016-04-11 08:57:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:distance_function +2016-04-11 08:57:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:48 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. 
of Entries:1; default:euclidian distance], typology=ENUM] +2016-04-11 08:57:48 DEBUG WPS2SM:254 - Conversion to SM Type->lof_threshold is a Literal Input +2016-04-11 08:57:48 DEBUG WPS2SM:93 - WPS type: +2016-04-11 08:57:48 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 08:57:48 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 08:57:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the LOF score threshold over which the point is an outlier (usually 2) +2016-04-11 08:57:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:lof_threshold +2016-04-11 08:57:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 08:57:48 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. 
of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. of Entries:1; default:euclidian distance], typology=ENUM], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 08:57:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 08:57:48 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 08:57:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 08:57:48 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 08:57:48 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 08:57:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 08:57:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 08:57:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 08:57:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 08:57:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 08:57:48 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 08:57:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 08:57:48 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 08:57:48 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 08:57:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 08:57:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 08:57:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 08:57:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 08:57:48 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 08:57:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 08:57:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 08:57:48 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 08:57:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 08:57:48 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 08:57:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 08:57:48 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 08:57:48 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 08:57:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 08:57:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 08:57:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 08:57:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 08:57:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 08:57:48 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 08:57:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 08:57:48 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 08:57:48 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 08:57:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 08:57:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 08:57:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 08:57:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 08:57:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 08:57:48 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 08:57:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:49 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 08:57:49 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 08:57:49 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 08:57:49 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 08:57:49 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 08:57:49 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 08:57:49 DEBUG ItemBuilder:108 - Is shared folder: 
StatisticalAlgorithmsImporter +2016-04-11 08:57:49 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 08:57:49 INFO WorkspaceExplorerServiceImpl:142 - end time - 178 msc 0 sec +2016-04-11 08:57:49 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 08:57:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 08:57:59 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 08:57:59 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 08:57:59 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS +2016-04-11 08:57:59 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 08:57:59 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 08:57:59 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS + XMEANS + A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. 
column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + + + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + + + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 08:57:59 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 08:57:59 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 08:57:59 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 08:57:59 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 08:57:59 DEBUG SClient4WPS:290 - WPSClient->Input: + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + +2016-04-11 08:57:59 DEBUG SClient4WPS:290 - WPSClient->Input: + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + +2016-04-11 08:57:59 DEBUG SClient4WPS:290 - WPSClient->Input: + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + +2016-04-11 08:57:59 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. 
number of points which define an outlier set + + + + 2 + + +2016-04-11 08:57:59 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 08:57:59 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 08:57:59 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 08:57:59 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 08:57:59 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 08:57:59 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 08:57:59 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 08:57:59 DEBUG WPS2SM:201 - Schema: null +2016-04-11 08:57:59 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 08:57:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 08:57:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 08:57:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:59 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 08:57:59 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 08:57:59 DEBUG WPS2SM:93 - WPS type: +2016-04-11 08:57:59 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 08:57:59 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 08:57:59 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 08:57:59 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 08:57:59 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 08:57:59 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 08:57:59 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 08:57:59 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 08:57:59 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 08:57:59 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 08:57:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 08:57:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 08:57:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:59 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 08:57:59 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 08:57:59 DEBUG WPS2SM:93 - WPS type: +2016-04-11 08:57:59 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 08:57:59 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 08:57:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 08:57:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 08:57:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 08:57:59 DEBUG WPS2SM:254 - Conversion to SM Type->maxIterations is a Literal Input +2016-04-11 08:57:59 DEBUG WPS2SM:93 - WPS type: +2016-04-11 08:57:59 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 08:57:59 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 08:57:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:XMeans max number of overall iterations of the clustering learning +2016-04-11 08:57:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxIterations +2016-04-11 08:57:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 08:57:59 DEBUG WPS2SM:254 - Conversion to SM Type->minClusters is a Literal Input +2016-04-11 08:57:59 DEBUG WPS2SM:93 - WPS type: +2016-04-11 08:57:59 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 08:57:59 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 08:57:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:minimum number of expected clusters +2016-04-11 08:57:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minClusters +2016-04-11 08:57:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 08:57:59 DEBUG WPS2SM:254 - Conversion to SM Type->maxClusters is a Literal Input +2016-04-11 08:57:59 DEBUG WPS2SM:93 - WPS type: +2016-04-11 08:57:59 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 08:57:59 DEBUG WPS2SM:101 - Guessed default value: 50 +2016-04-11 08:57:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of clusters to produce +2016-04-11 08:57:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxClusters +2016-04-11 08:57:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. 
of Entries:1; default:50], typology=OBJECT] +2016-04-11 08:57:59 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 08:57:59 DEBUG WPS2SM:93 - WPS type: +2016-04-11 08:57:59 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 08:57:59 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 08:57:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-11 08:57:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 08:57:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 08:57:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 08:57:59 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. of Entries:1; default:50], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 08:57:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 08:57:59 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 08:57:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 08:57:59 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 08:57:59 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 08:57:59 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 08:57:59 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 08:57:59 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 08:57:59 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 08:57:59 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 08:57:59 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 08:57:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 08:57:59 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 08:57:59 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 08:57:59 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 08:57:59 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 08:57:59 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 08:57:59 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 08:57:59 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 08:57:59 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:59 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 08:57:59 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 08:57:59 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 08:57:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 08:57:59 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 08:57:59 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 08:57:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-11 08:57:59 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-11 08:57:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-11 08:57:59 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 08:57:59 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 08:57:59 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 08:57:59 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 08:57:59 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 08:57:59 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 08:57:59 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:59 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 08:57:59 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 08:57:59 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 08:57:59 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 08:57:59 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:59 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:59 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:59 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 08:57:59 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 08:57:59 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:59 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:59 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-11 08:57:59 DEBUG ASLSession:458 - Getting security token: null in thread 29 
+2016-04-11 08:57:59 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 08:57:59 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:59 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 08:57:59 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 08:57:59 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 08:57:59 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 08:57:59 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 08:57:59 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 08:57:59 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 08:57:59 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 08:57:59 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 08:57:59 INFO WorkspaceExplorerServiceImpl:142 - end time - 195 msc 0 sec +2016-04-11 08:57:59 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 08:58:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 08:58:14 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 08:59:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 08:59:09 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:00:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:00:04 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:00:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:00:59 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:01:54 DEBUG 
DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:01:54 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:02:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:02:49 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:04:36 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-11 09:04:36 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-11 09:04:36 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-11 09:04:36 INFO SessionUtil:49 - no user found in session, use test user +2016-04-11 09:04:36 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-11 09:04:36 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:04:36 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-11 09:04:36 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@29186429 +2016-04-11 09:04:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:04:36 INFO ASLSession:352 - Logging the entrance +2016-04-11 09:04:36 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-11 09:04:36 DEBUG TemplateModel:83 - 2016-04-11 09:04:36, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-11 09:04:36 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:04:36 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-11 09:04:39 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-11 09:04:40 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-11 09:04:40 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-11 09:04:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:04:40 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:04:40 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:04:40 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:04:40 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 120 ms +2016-04-11 
09:04:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-11 09:04:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-11 09:04:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-11 09:04:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-11 09:04:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-11 09:04:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-11 09:04:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-11 09:04:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-11 09:04:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-11 09:04:40 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-11 09:04:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-11 09:04:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-11 09:04:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-11 09:04:40 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-11 09:04:40 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-11 09:04:40 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:04:40 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-11 09:04:40 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@6441caeb +2016-04-11 09:04:40 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@48375700 +2016-04-11 09:04:40 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@523596e8 +2016-04-11 09:04:40 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@40cb4833 +2016-04-11 09:04:40 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 125 ms +2016-04-11 09:04:40 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-11 09:04:40 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-11 09:04:40 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-11 09:04:40 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-11 09:04:40 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-11 09:04:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:04:40 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:04:40 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-11 09:04:40 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 25 ms +2016-04-11 09:04:40 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-11 09:04:40 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:04:40 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-11 09:04:40 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:04:41 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:04:41 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-11 09:04:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:04:44 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:04:44 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:04:44 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-11 09:04:44 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:04:44 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:04:44 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:04:44 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:04:44 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:04:44 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:04:44 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:04:44 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-11 09:04:44 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-11 09:04:44 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:04:44 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:04:44 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:04:44 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:04:44 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:04:44 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:04:44 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:04:44 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:04:44 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:04:44 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:04:44 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:04:44 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:04:44 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:04:44 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:04:44 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:04:44 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:04:44 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:04:44 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:04:44 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:04:44 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:04:44 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:04:44 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:04:44 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:04:44 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:04:44 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:04:44 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:04:44 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:04:44 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:04:44 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:04:44 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:04:44 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:04:44 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:04:44 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:04:44 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:04:44 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:04:44 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:04:44 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:04:44 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-11 09:04:44 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:04:44 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:04:44 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:04:44 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-11 09:04:44 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-11 09:04:44 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:04:44 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:04:44 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:04:44 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:04:44 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:04:44 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:04:44 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-11 09:04:44 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:04:44 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:04:44 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:04:44 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-11 09:04:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:04:44 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:04:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:04:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:04:44 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:04:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:04:44 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:04:44 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:04:44 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:04:44 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:04:45 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:04:45 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:04:45 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:04:45 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-11 09:04:45 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:04:45 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:04:45 DEBUG JCRRepository:271 - Initialize repository +2016-04-11 09:04:45 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-11 09:04:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-11 09:04:45 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:04:45 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-11 09:04:45 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 20 ms +2016-04-11 09:04:45 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-11 09:04:45 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:04:45 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:04:45 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:04:45 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:04:45 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:04:45 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:04:45 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-11 09:04:45 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:04:45 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-11 09:04:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:04:45 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:04:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:04:45 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:04:45 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:04:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:04:45 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:04:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:04:45 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:04:45 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:04:45 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:04:45 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:04:45 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:04:45 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:04:45 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:04:45 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:04:45 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-11 09:04:45 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:04:45 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:04:45 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-11 09:04:45 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-11 09:04:45 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:04:45 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:04:45 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:04:45 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:04:45 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:04:45 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:04:45 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:04:45 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:04:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:04:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:04:45 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:04:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:04:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-11 09:04:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:04:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:04:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:04:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:04:45 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:04:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:04:45 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:04:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:04:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:04:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:04:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:04:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:04:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:04:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:04:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:04:45 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:04:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-11 09:04:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 
09:04:45 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:04:45 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:04:45 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 31 ms +2016-04-11 09:04:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-11 09:04:46 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:04:46 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-11 09:04:46 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-11 09:04:46 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-11 09:04:46 INFO ISClientConnector:82 - found only one RR, take it +2016-04-11 09:04:46 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-11 09:04:46 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-11 09:04:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-11 09:04:46 DEBUG StorageClient:517 - set scope: /gcube +2016-04-11 09:04:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-11 09:04:46 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:04:46 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:04:46 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-11 09:04:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-11 09:04:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-11 09:04:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-11 09:04:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:04:46 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:04:46 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:04:46 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:04:46 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-11 09:04:46 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:04:46 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:04:46 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:04:46 INFO WorkspaceExplorerServiceImpl:142 - end time - 419 msc 0 sec +2016-04-11 09:04:46 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:04:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:04:50 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:04:50 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:04:50 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-11 09:04:50 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:04:50 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:04:51 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:04:51 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:04:51 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:04:51 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:04:51 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:04:51 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-11 09:04:51 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-11 09:04:51 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-11 09:04:51 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-11 09:04:51 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:04:51 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:04:51 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:04:51 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:04:51 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:04:51 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:04:51 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:04:51 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:04:51 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:04:51 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:04:51 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:04:51 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:04:51 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:04:51 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:04:51 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:04:51 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:04:51 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:04:51 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:04:51 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:04:51 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:04:51 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:04:51 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:04:51 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:04:51 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:04:51 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:04:51 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:04:51 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:04:51 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:04:51 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:04:51 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:04:51 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:04:51 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:04:51 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:04:51 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:04:51 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:04:51 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:04:51 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:04:51 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-11 09:04:51 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:04:51 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:04:51 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-11 09:04:51 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-11 09:04:51 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-11 09:04:51 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:04:51 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-11 09:04:51 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-11 09:04:51 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:04:51 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:04:51 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:04:51 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-11 09:04:51 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-11 09:04:51 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:04:51 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:04:51 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-11 09:04:51 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:04:51 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:04:51 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-11 09:04:51 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-11 09:04:51 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-11 09:04:51 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:04:51 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-11 09:04:51 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:04:51 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:04:51 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:04:51 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:04:51 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-11 09:04:51 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:04:51 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:04:51 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:04:51 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:04:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:04:51 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:04:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:04:51 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:04:51 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:04:51 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:04:51 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:04:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:04:51 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:04:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:04:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:04:51 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:04:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:04:51 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:04:51 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:04:51 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:04:51 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:04:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:04:51 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:04:51 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:04:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:04:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:04:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:04:51 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:04:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:04:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:04:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:04:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:04:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:04:51 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:04:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:04:51 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:04:51 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:04:51 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:04:51 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:04:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:04:51 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:04:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:04:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:04:51 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:04:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:04:51 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:04:51 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:04:51 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:04:51 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:04:51 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:04:51 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:04:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:04:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:04:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:04:51 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:04:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:04:51 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:04:51 INFO JCRWorkspace:315 - Getting Workspace 
of user: giancarlo.panichi +2016-04-11 09:04:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:04:51 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:04:51 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:04:51 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:04:51 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:04:51 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:04:51 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:04:51 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:04:51 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:04:51 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:04:51 INFO WorkspaceExplorerServiceImpl:142 - end time - 212 msc 0 sec +2016-04-11 09:04:51 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:04:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:04:55 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:04:55 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:04:55 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF +2016-04-11 09:04:55 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:04:55 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:04:55 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF + LOF + Local Outlier Factor (LOF). 
A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed. + + + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + + + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + + + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + + + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + + + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. 
the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + + + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:04:56 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:04:56 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:04:56 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. 
column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + +2016-04-11 09:04:56 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + +2016-04-11 09:04:56 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + +2016-04-11 09:04:56 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + +2016-04-11 09:04:56 DEBUG SClient4WPS:290 - WPSClient->Input: + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + +2016-04-11 09:04:56 DEBUG SClient4WPS:290 - WPSClient->Input: + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + +2016-04-11 09:04:56 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:04:56 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:04:56 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:04:56 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:04:56 DEBUG WPS2SM:279 - Conversion to SM Type->PointsTable is a Complex Input +2016-04-11 09:04:56 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:04:56 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:04:56 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:04:56 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:04:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:04:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsTable +2016-04-11 09:04:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:04:56 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-11 09:04:56 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:04:56 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:04:56 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:04:56 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:04:56 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:04:56 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:04:56 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from PointsTable separated by | +2016-04-11 09:04:56 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:04:56 DEBUG WPS2SM:115 - Machter end: 93 +2016-04-11 09:04:56 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:04:56 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: PointsTable +2016-04-11 09:04:56 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:04:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from PointsTable separated by | ] +2016-04-11 09:04:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:04:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:04:56 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:04:56 DEBUG WPS2SM:254 - Conversion to SM Type->PointsClusterLabel is a Literal Input +2016-04-11 09:04:56 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:04:56 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:04:56 DEBUG WPS2SM:101 - Guessed default value: Cluster_ +2016-04-11 09:04:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:04:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsClusterLabel +2016-04-11 09:04:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:04:56 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT] +2016-04-11 09:04:56 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_lower_bound is a Literal Input +2016-04-11 09:04:56 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:04:56 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:04:56 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:04:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:locality (usually called k): minimal number of nearest neighbors +2016-04-11 09:04:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_lower_bound +2016-04-11 09:04:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:04:56 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. of Entries:1; Max N. 
of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:04:56 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_upper_bound is a Literal Input +2016-04-11 09:04:56 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:04:56 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:04:56 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:04:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of nearest neighbors to take into account for outliers evaluation +2016-04-11 09:04:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_upper_bound +2016-04-11 09:04:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:04:56 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:04:56 DEBUG WPS2SM:254 - Conversion to SM Type->distance_function is a Literal Input +2016-04-11 09:04:56 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:04:56 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:04:56 DEBUG WPS2SM:101 - Guessed default value: euclidian distance +2016-04-11 09:04:56 DEBUG WPS2SM:265 - ValueType[]:[euclidian distance, squared distance, cosine distance, inverted cosine distance, angle] +2016-04-11 09:04:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the distance function to use in the calculation +2016-04-11 09:04:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:distance_function +2016-04-11 09:04:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:04:56 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. 
of Entries:1; default:euclidian distance], typology=ENUM] +2016-04-11 09:04:56 DEBUG WPS2SM:254 - Conversion to SM Type->lof_threshold is a Literal Input +2016-04-11 09:04:56 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:04:56 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:04:56 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:04:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the LOF score threshold over which the point is an outlier (usually 2) +2016-04-11 09:04:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:lof_threshold +2016-04-11 09:04:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:04:56 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:04:56 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. 
of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. of Entries:1; default:euclidian distance], typology=ENUM], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:04:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:04:56 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:04:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:04:56 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:04:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:04:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:04:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:04:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:04:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:04:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:04:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:04:56 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:04:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:04:56 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:04:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:04:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:04:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:04:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:04:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:04:56 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:04:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:04:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:04:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:04:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:04:56 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:04:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:04:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:04:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:04:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:04:56 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:04:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:04:56 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:04:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:04:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:04:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:04:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:04:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:04:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:04:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:04:56 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:04:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:04:56 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:04:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:04:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:04:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:04:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:04:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:04:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:04:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:04:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:04:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:04:56 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:04:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:04:56 INFO JCRWorkspace:315 - Getting Workspace 
of user: giancarlo.panichi +2016-04-11 09:04:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:04:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:04:56 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:04:56 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:04:56 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:04:56 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:04:56 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:04:56 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:04:56 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:04:56 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:04:56 INFO WorkspaceExplorerServiceImpl:142 - end time - 202 msc 0 sec +2016-04-11 09:04:56 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:05:54 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-11 09:05:54 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-11 09:05:54 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-11 09:05:54 INFO SessionUtil:49 - no user found in session, use test user +2016-04-11 09:05:54 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-11 09:05:54 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:05:54 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-11 09:05:54 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@1b712b5e +2016-04-11 09:05:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:05:54 INFO ASLSession:352 - Logging the entrance +2016-04-11 09:05:54 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-11 09:05:54 DEBUG TemplateModel:83 - 2016-04-11 09:05:54, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-11 09:05:54 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:05:54 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-11 09:06:05 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-11 09:06:05 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-11 09:06:05 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-11 09:06:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:06:05 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:06:05 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:06:05 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:06:05 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 120 ms +2016-04-11 09:06:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-11 09:06:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-11 09:06:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-11 09:06:05 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-11 09:06:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-11 09:06:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-11 09:06:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-11 09:06:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-11 09:06:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-11 09:06:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-11 09:06:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-11 09:06:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-11 09:06:05 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-11 09:06:05 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-11 09:06:05 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-11 09:06:06 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:06:06 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:06:06 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@7a206b61 +2016-04-11 09:06:06 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@10b871a6 +2016-04-11 09:06:06 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@7139ad77 +2016-04-11 09:06:06 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@3988fd25 +2016-04-11 09:06:06 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 94 ms +2016-04-11 09:06:06 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-11 09:06:06 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-11 09:06:06 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-11 09:06:06 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-11 09:06:06 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-11 09:06:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:06:06 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:06:06 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-11 09:06:06 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 24 ms +2016-04-11 09:06:06 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-11 09:06:06 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:06:06 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-11 09:06:06 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:06:07 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:06:07 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-11 09:06:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:06:10 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:06:10 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:06:10 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-11 09:06:10 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:06:10 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:06:11 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:06:11 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:06:11 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:06:11 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:06:11 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:06:11 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-11 09:06:11 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-11 09:06:11 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:06:11 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:06:11 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:06:11 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:06:11 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:06:11 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:06:11 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:06:11 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:06:11 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:06:11 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:06:11 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:06:11 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:06:11 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:06:11 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:06:11 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:06:11 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:06:11 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:06:11 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:06:11 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:06:11 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:06:11 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:06:11 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:06:11 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:06:11 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:06:11 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:06:11 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:06:11 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:06:11 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:06:11 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:06:11 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:06:11 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:06:11 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:06:11 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:06:11 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:06:11 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:06:11 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:06:11 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:06:11 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-11 09:06:11 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:06:11 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:06:11 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:06:11 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-11 09:06:11 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-11 09:06:11 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:06:11 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:06:11 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:06:11 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:06:11 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:06:11 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:06:11 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-11 09:06:11 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:06:11 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:06:11 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:06:11 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-11 09:06:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:06:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:06:11 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:06:11 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:06:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:06:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:06:11 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:06:11 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:06:11 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:06:11 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:06:11 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:06:11 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:06:11 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:06:11 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:06:11 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:06:11 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-11 09:06:11 DEBUG JCRRepository:271 - Initialize repository +2016-04-11 09:06:11 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-11 09:06:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-11 09:06:11 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:06:11 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-11 09:06:11 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-11 09:06:11 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is 
null?false +2016-04-11 09:06:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:06:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:06:11 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:06:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:06:11 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:06:11 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:06:11 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:06:11 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:06:11 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:06:11 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:06:11 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:06:11 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:06:11 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:06:11 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:06:11 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:06:11 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:06:11 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. 
+2016-04-11 09:06:11 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:06:11 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-11 09:06:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:06:11 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:06:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:06:11 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:06:11 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:06:11 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:06:11 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:06:11 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:06:11 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:06:11 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-11 09:06:11 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:06:11 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:06:11 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-11 09:06:11 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-11 09:06:11 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:06:11 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:06:11 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:06:11 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:06:11 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:06:11 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:06:11 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:06:11 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:06:11 
INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:06:11 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:06:11 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:06:11 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:06:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:06:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:06:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:06:11 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:06:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:06:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:06:12 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:06:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:06:12 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:06:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:06:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:06:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:06:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:06:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:06:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath 
/Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:06:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:06:12 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:06:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 09:06:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:06:12 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:06:12 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:06:12 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 25 ms +2016-04-11 09:06:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 09:06:12 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:06:12 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-11 09:06:12 INFO ICClient:83 - 
executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-11 09:06:12 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-11 09:06:12 INFO ISClientConnector:82 - found only one RR, take it +2016-04-11 09:06:12 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-11 09:06:12 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-11 09:06:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 09:06:12 DEBUG StorageClient:517 - set scope: /gcube +2016-04-11 09:06:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 09:06:12 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:06:12 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:06:12 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 18 ms +2016-04-11 09:06:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 09:06:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 09:06:12 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube in thread 33 +2016-04-11 09:06:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:06:12 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:06:12 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:06:12 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:06:12 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:06:12 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:06:12 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:06:12 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:06:12 INFO WorkspaceExplorerServiceImpl:142 - end time - 399 msc 0 sec +2016-04-11 09:06:12 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:06:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:06:17 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:06:17 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:06:17 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-11 09:06:17 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:06:17 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:06:18 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:06:18 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:06:18 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:06:18 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:06:18 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:06:18 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-11 09:06:18 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-11 09:06:18 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-11 09:06:18 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-11 09:06:18 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:06:18 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:06:18 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:06:18 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:06:18 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:06:18 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:06:18 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:06:18 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:06:18 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:06:18 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:06:18 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:06:18 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:06:18 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:06:18 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:06:18 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:06:18 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:06:18 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:06:18 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:06:18 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:06:18 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:06:18 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:06:18 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:06:18 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:06:18 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:06:18 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:06:18 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:06:18 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:06:18 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:06:18 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:06:18 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:06:18 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:06:18 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:06:18 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:06:18 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:06:18 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:06:18 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:06:18 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:06:18 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-11 09:06:18 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:06:18 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:06:18 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-11 09:06:18 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-11 09:06:18 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-11 09:06:18 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:06:18 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-11 09:06:18 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-11 09:06:18 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:06:18 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:06:18 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:06:18 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-11 09:06:18 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-11 09:06:18 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:06:18 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:06:18 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-11 09:06:18 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:06:18 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:06:18 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-11 09:06:18 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-11 09:06:18 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-11 09:06:18 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:06:18 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-11 09:06:18 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:06:18 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:06:18 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:06:18 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:06:18 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-11 09:06:18 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:06:18 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:06:18 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:06:18 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:06:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:06:18 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:06:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:06:18 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:06:18 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:06:18 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:06:18 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:06:18 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:06:18 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:06:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:06:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:06:18 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:06:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:06:18 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:06:18 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:06:18 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:06:18 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:06:18 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:06:18 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:06:18 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:06:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:06:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:06:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:06:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:06:18 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:06:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:06:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:06:18 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:06:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:06:18 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:06:18 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:06:18 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:06:18 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:06:18 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:06:18 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:06:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:06:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:06:18 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:06:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:06:18 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:06:18 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:06:18 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:06:18 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:06:18 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:06:18 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:06:18 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:06:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:06:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:06:18 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:06:19 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:06:19 DEBUG DefaultScopeProvider:38 - setting scope 
/gcube/devsec/devVRE in thread 30 +2016-04-11 09:06:19 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:06:19 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:06:19 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:06:19 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:06:19 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:06:19 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:06:19 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:06:19 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:06:19 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:06:19 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:06:19 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:06:19 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:06:19 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:06:19 INFO WorkspaceExplorerServiceImpl:142 - end time - 188 msc 0 sec +2016-04-11 09:06:19 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:06:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:06:23 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:06:23 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:06:23 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS +2016-04-11 09:06:23 DEBUG SClient4WPS:263 - Describe Process WPS URL: 
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:06:23 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:06:23 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS + XMEANS + A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + + + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + + + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. 
maximum number of clusters to produce + + + + 50 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:06:23 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:06:23 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:06:23 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. 
column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:06:23 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:06:23 DEBUG SClient4WPS:290 - WPSClient->Input: + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + +2016-04-11 09:06:23 DEBUG SClient4WPS:290 - WPSClient->Input: + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + +2016-04-11 09:06:23 DEBUG SClient4WPS:290 - WPSClient->Input: + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + +2016-04-11 09:06:23 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-11 09:06:23 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:06:23 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:06:23 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:06:23 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:06:23 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:06:23 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:06:23 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:06:23 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:06:23 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:06:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:06:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:06:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:06:23 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-11 09:06:23 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:06:23 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:06:23 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:06:23 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:06:23 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:06:23 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:06:23 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:06:23 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:06:23 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:06:23 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:06:23 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:06:23 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:06:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:06:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:06:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:06:23 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:06:23 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:06:23 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:06:23 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:06:23 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:06:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:06:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:06:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:06:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:06:23 DEBUG WPS2SM:254 - Conversion to SM Type->maxIterations is a Literal Input +2016-04-11 09:06:23 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:06:23 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:06:23 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:06:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:XMeans max number of overall iterations of the clustering learning +2016-04-11 09:06:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxIterations +2016-04-11 09:06:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:06:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:06:23 DEBUG WPS2SM:254 - Conversion to SM Type->minClusters is a Literal Input +2016-04-11 09:06:23 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:06:23 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:06:23 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:06:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:minimum number of expected clusters +2016-04-11 09:06:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minClusters +2016-04-11 09:06:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:06:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:06:23 DEBUG WPS2SM:254 - Conversion to SM Type->maxClusters is a Literal Input +2016-04-11 09:06:23 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:06:23 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:06:23 DEBUG WPS2SM:101 - Guessed default value: 50 +2016-04-11 09:06:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of clusters to produce +2016-04-11 09:06:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxClusters +2016-04-11 09:06:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:06:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. 
of Entries:1; default:50], typology=OBJECT] +2016-04-11 09:06:23 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:06:23 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:06:23 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:06:23 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:06:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-11 09:06:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:06:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:06:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:06:23 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. of Entries:1; default:50], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:06:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:06:23 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:06:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:06:23 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:06:23 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:06:23 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:06:23 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:06:23 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:06:23 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:06:23 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:06:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:06:23 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:06:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:06:23 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:06:23 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:06:23 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:06:23 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:06:23 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:06:23 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:06:23 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:06:23 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:06:23 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:06:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:06:23 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:06:23 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:06:23 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:06:23 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:06:23 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:06:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:06:23 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:06:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:06:23 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:06:23 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:06:23 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:06:23 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:06:23 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:06:23 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:06:23 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:06:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:06:23 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:06:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:06:23 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:06:23 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:06:23 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:06:23 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:06:23 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:06:23 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:06:23 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:06:23 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:06:23 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:06:23 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:06:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:06:23 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:06:23 INFO JCRWorkspace:315 - Getting Workspace 
of user: giancarlo.panichi +2016-04-11 09:06:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:06:24 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:06:24 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:06:24 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:06:24 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:06:24 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:06:24 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:06:24 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:06:24 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:06:24 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:06:24 INFO WorkspaceExplorerServiceImpl:142 - end time - 168 msc 0 sec +2016-04-11 09:06:24 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:07:05 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-11 09:07:05 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-11 09:07:05 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-11 09:07:05 INFO SessionUtil:49 - no user found in session, use test user +2016-04-11 09:07:05 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-11 09:07:05 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:07:05 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-11 09:07:05 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@2575659e +2016-04-11 09:07:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:07:05 INFO ASLSession:352 - Logging the entrance +2016-04-11 09:07:05 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-11 09:07:05 DEBUG TemplateModel:83 - 2016-04-11 09:07:05, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-11 09:07:05 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:07:05 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-11 09:07:11 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-11 09:07:11 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-11 09:07:11 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-11 09:07:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:07:11 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:07:11 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:07:11 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:07:11 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 123 ms +2016-04-11 09:07:11 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-11 09:07:11 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-11 09:07:11 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-11 09:07:11 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-11 09:07:11 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-11 09:07:11 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-11 09:07:11 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-11 09:07:11 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-11 09:07:11 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-11 09:07:11 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-11 09:07:11 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-11 09:07:11 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-11 09:07:11 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-11 09:07:11 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-11 09:07:11 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-11 09:07:11 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:07:11 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:07:11 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@7e246fb3 +2016-04-11 09:07:11 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@5e42bef1 +2016-04-11 09:07:11 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@363c2835 +2016-04-11 09:07:11 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@19f1f1db +2016-04-11 09:07:11 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 120 ms +2016-04-11 09:07:11 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-11 09:07:11 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-11 09:07:11 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-11 09:07:11 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-11 09:07:11 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-11 09:07:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:07:11 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:07:11 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-11 09:07:11 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 32 ms +2016-04-11 09:07:11 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-11 09:07:11 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:07:11 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-11 09:07:11 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:07:12 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:07:12 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-11 09:07:56 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-11 09:07:56 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-11 09:07:56 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-11 09:07:56 INFO SessionUtil:49 - no user found in session, use test user +2016-04-11 09:07:56 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-11 09:07:56 DEBUG ASLSession:458 - Getting security token: null in thread 29 +2016-04-11 09:07:56 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-11 09:07:56 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@5af098e6 +2016-04-11 09:07:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 29 +2016-04-11 09:07:56 INFO ASLSession:352 - Logging the entrance +2016-04-11 09:07:56 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-11 09:07:56 DEBUG TemplateModel:83 - 2016-04-11 09:07:56, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-11 09:07:56 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:07:56 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-11 09:07:59 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-11 09:07:59 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-11 09:07:59 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-11 09:07:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:07:59 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:07:59 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:07:59 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:07:59 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 124 ms +2016-04-11 09:07:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-11 09:07:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-11 09:07:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-11 09:07:59 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-11 09:07:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-11 09:07:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-11 09:07:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-11 09:07:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-11 09:07:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-11 09:07:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-11 09:07:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-11 09:07:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-11 09:07:59 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-11 09:07:59 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-11 09:07:59 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-11 09:07:59 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:07:59 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:07:59 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@c780239 +2016-04-11 09:07:59 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@34daac03 +2016-04-11 09:07:59 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@18c8cec5 +2016-04-11 09:07:59 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@75233969 +2016-04-11 09:07:59 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 98 ms +2016-04-11 09:07:59 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-11 09:07:59 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-11 09:07:59 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-11 09:07:59 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-11 09:07:59 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-11 09:07:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:08:00 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:08:00 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-11 09:08:00 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 21 ms +2016-04-11 09:08:00 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-11 09:08:00 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:08:00 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-11 09:08:00 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:08:00 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:08:00 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-11 09:08:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:08:04 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:08:04 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:08:04 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING +2016-04-11 09:08:04 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:08:04 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:08:04 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING + MAX_ENT_NICHE_MODELLING + A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. 
In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt + + + OutputTableLabel + The name of the table to produce + Name of the parameter: OutputTableLabel. The name of the table to produce + + + + maxent_ + + + + SpeciesName + The name of the species to model and the occurrence records refer to + Name of the parameter: SpeciesName. The name of the species to model and the occurrence records refer to + + + + generic_species + + + + MaxIterations + The number of learning iterations of the MaxEnt algorithm + Name of the parameter: MaxIterations. The number of learning iterations of the MaxEnt algorithm + + + + 1000 + + + + DefaultPrevalence + A priori probability of presence at ordinary occurrence points + Name of the parameter: DefaultPrevalence. 
A priori probability of presence at ordinary occurrence points + + + + 0.5 + + + + OccurrencesTable + A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + Name of the parameter: OccurrencesTable. A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + LongitudeColumn + The column containing longitude values [the name of a column from OccurrencesTable] + Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrencesTable] + + + + decimallongitude + + + + LatitudeColumn + The column containing latitude values [the name of a column from OccurrencesTable] + Name of the parameter: LatitudeColumn. The column containing latitude values [the name of a column from OccurrencesTable] + + + + decimallatitude + + + + XResolution + Model projection resolution on the X axis in decimal degrees + Name of the parameter: XResolution. Model projection resolution on the X axis in decimal degrees + + + + 1 + + + + YResolution + Model projection resolution on the Y axis in decimal degrees + Name of the parameter: YResolution. Model projection resolution on the Y axis in decimal degrees + + + + 1 + + + + Layers + The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. 
Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + + + + + + + Z + Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + Name of the parameter: Z. Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + + + + 0 + + + + TimeIndex + Time Index. The default is the first time indexed in the input environmental datasets + Name of the parameter: TimeIndex. Time Index. The default is the first time indexed in the input environmental datasets + + + + 0 + + + + + + Best Threshold + Best threshold for transforming MaxEnt values into 0/1 probability assignments + Name of the parameter: Best Threshold. Best threshold for transforming MaxEnt values into 0/1 probability assignments + + + + + + Estimated Prevalence + The a posteriori estimated prevalence of the species + Name of the parameter: Estimated Prevalence. The a posteriori estimated prevalence of the species + + + + + + Variables contributions + The contribution of each variable to the MaxEnt values estimates + Name of the parameter: Variables contributions. 
The contribution of each variable to the MaxEnt values estimates + + + + + + Variables Permutations Importance + The importance of the permutations of the variables during the training + Name of the parameter: Variables Permutations Importance. The importance of the permutations of the variables during the training + + + + + + ASCII Maps of the environmental layers for checking features aligments + ASCII Maps of the environmental layers for checking features aligments + Name of the parameter: ASCII Maps of the environmental layers for checking features aligments. ASCII Maps of the environmental layers for checking features aligments + + + + application/d4science + + + + + application/d4science + + + + + + OutputTable7 + Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OutputTable7. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:08:04 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:08:04 DEBUG SClient4WPS:290 - WPSClient->Input: + OutputTableLabel + The name of the table to produce + Name of the parameter: OutputTableLabel. The name of the table to produce + + + + maxent_ + + +2016-04-11 09:08:04 DEBUG SClient4WPS:290 - WPSClient->Input: + SpeciesName + The name of the species to model and the occurrence records refer to + Name of the parameter: SpeciesName. 
The name of the species to model and the occurrence records refer to + + + + generic_species + + +2016-04-11 09:08:04 DEBUG SClient4WPS:290 - WPSClient->Input: + MaxIterations + The number of learning iterations of the MaxEnt algorithm + Name of the parameter: MaxIterations. The number of learning iterations of the MaxEnt algorithm + + + + 1000 + + +2016-04-11 09:08:04 DEBUG SClient4WPS:290 - WPSClient->Input: + DefaultPrevalence + A priori probability of presence at ordinary occurrence points + Name of the parameter: DefaultPrevalence. A priori probability of presence at ordinary occurrence points + + + + 0.5 + + +2016-04-11 09:08:04 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencesTable + A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + Name of the parameter: OccurrencesTable. A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:08:04 DEBUG SClient4WPS:290 - WPSClient->Input: + LongitudeColumn + The column containing longitude values [the name of a column from OccurrencesTable] + Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrencesTable] + + + + decimallongitude + + +2016-04-11 09:08:04 DEBUG SClient4WPS:290 - WPSClient->Input: + LatitudeColumn + The column containing latitude values [the name of a column from OccurrencesTable] + Name of the parameter: LatitudeColumn. 
The column containing latitude values [the name of a column from OccurrencesTable] + + + + decimallatitude + + +2016-04-11 09:08:04 DEBUG SClient4WPS:290 - WPSClient->Input: + XResolution + Model projection resolution on the X axis in decimal degrees + Name of the parameter: XResolution. Model projection resolution on the X axis in decimal degrees + + + + 1 + + +2016-04-11 09:08:04 DEBUG SClient4WPS:290 - WPSClient->Input: + YResolution + Model projection resolution on the Y axis in decimal degrees + Name of the parameter: YResolution. Model projection resolution on the Y axis in decimal degrees + + + + 1 + + +2016-04-11 09:08:04 DEBUG SClient4WPS:290 - WPSClient->Input: + Layers + The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + + + + + +2016-04-11 09:08:04 DEBUG SClient4WPS:290 - WPSClient->Input: + Z + Value of Z. 
Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + Name of the parameter: Z. Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + + + + 0 + + +2016-04-11 09:08:04 DEBUG SClient4WPS:290 - WPSClient->Input: + TimeIndex + Time Index. The default is the first time indexed in the input environmental datasets + Name of the parameter: TimeIndex. Time Index. The default is the first time indexed in the input environmental datasets + + + + 0 + + +2016-04-11 09:08:04 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:08:04 DEBUG SClient4WPS:297 - WPSClient->Output: + Best Threshold + Best threshold for transforming MaxEnt values into 0/1 probability assignments + Name of the parameter: Best Threshold. Best threshold for transforming MaxEnt values into 0/1 probability assignments + + + + +2016-04-11 09:08:04 DEBUG SClient4WPS:297 - WPSClient->Output: + Estimated Prevalence + The a posteriori estimated prevalence of the species + Name of the parameter: Estimated Prevalence. The a posteriori estimated prevalence of the species + + + + +2016-04-11 09:08:04 DEBUG SClient4WPS:297 - WPSClient->Output: + Variables contributions + The contribution of each variable to the MaxEnt values estimates + Name of the parameter: Variables contributions. The contribution of each variable to the MaxEnt values estimates + + + + +2016-04-11 09:08:04 DEBUG SClient4WPS:297 - WPSClient->Output: + Variables Permutations Importance + The importance of the permutations of the variables during the training + Name of the parameter: Variables Permutations Importance. 
The importance of the permutations of the variables during the training + + + + +2016-04-11 09:08:04 DEBUG SClient4WPS:297 - WPSClient->Output: + ASCII Maps of the environmental layers for checking features aligments + ASCII Maps of the environmental layers for checking features aligments + Name of the parameter: ASCII Maps of the environmental layers for checking features aligments. ASCII Maps of the environmental layers for checking features aligments + + + + application/d4science + + + + + application/d4science + + + + +2016-04-11 09:08:04 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable7 + Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OutputTable7. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:08:04 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:08:04 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:08:04 DEBUG WPS2SM:254 - Conversion to SM Type->OutputTableLabel is a Literal Input +2016-04-11 09:08:04 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:08:04 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:08:04 DEBUG WPS2SM:101 - Guessed default value: maxent_ +2016-04-11 09:08:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The name of the table to produce +2016-04-11 09:08:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OutputTableLabel +2016-04-11 09:08:04 DEBUG WPS2SM:292 - Conversion to SM 
Type->Number of Inputs to Manage:1 +2016-04-11 09:08:04 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=maxent_, value=null, name=OutputTableLabel, description=The name of the table to produce [Min N. of Entries:1; Max N. of Entries:1; default:maxent_], typology=OBJECT] +2016-04-11 09:08:04 DEBUG WPS2SM:254 - Conversion to SM Type->SpeciesName is a Literal Input +2016-04-11 09:08:04 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:08:04 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:08:04 DEBUG WPS2SM:101 - Guessed default value: generic_species +2016-04-11 09:08:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The name of the species to model and the occurrence records refer to +2016-04-11 09:08:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:SpeciesName +2016-04-11 09:08:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:08:04 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=generic_species, value=null, name=SpeciesName, description=The name of the species to model and the occurrence records refer to [Min N. of Entries:1; Max N. 
of Entries:1; default:generic_species], typology=OBJECT] +2016-04-11 09:08:04 DEBUG WPS2SM:254 - Conversion to SM Type->MaxIterations is a Literal Input +2016-04-11 09:08:04 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:08:04 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:08:04 DEBUG WPS2SM:101 - Guessed default value: 1000 +2016-04-11 09:08:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The number of learning iterations of the MaxEnt algorithm +2016-04-11 09:08:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:MaxIterations +2016-04-11 09:08:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:08:04 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1000, value=null, name=MaxIterations, description=The number of learning iterations of the MaxEnt algorithm [Min N. of Entries:1; Max N. of Entries:1; default:1000], typology=OBJECT] +2016-04-11 09:08:04 DEBUG WPS2SM:254 - Conversion to SM Type->DefaultPrevalence is a Literal Input +2016-04-11 09:08:04 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:08:04 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-11 09:08:04 DEBUG WPS2SM:101 - Guessed default value: 0.5 +2016-04-11 09:08:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:A priori probability of presence at ordinary occurrence points +2016-04-11 09:08:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:DefaultPrevalence +2016-04-11 09:08:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:08:04 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=0.5, value=null, name=DefaultPrevalence, description=A priori probability of presence at ordinary occurrence points [Min N. of Entries:1; Max N. 
of Entries:1; default:0.5], typology=OBJECT] +2016-04-11 09:08:04 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencesTable is a Complex Input +2016-04-11 09:08:04 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:08:04 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:08:04 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:08:04 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:08:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] +2016-04-11 09:08:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencesTable +2016-04-11 09:08:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:08:04 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencesTable, description=A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:08:04 DEBUG WPS2SM:254 - Conversion to SM Type->LongitudeColumn is a Literal Input +2016-04-11 09:08:04 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:08:04 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:08:04 DEBUG WPS2SM:101 - Guessed default value: decimallongitude +2016-04-11 09:08:04 DEBUG WPS2SM:130 - Machter title: The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallongitude] +2016-04-11 09:08:04 DEBUG WPS2SM:131 - Machter find: true +2016-04-11 09:08:04 DEBUG WPS2SM:132 - Machter group: the name of a column from OccurrencesTable +2016-04-11 09:08:04 DEBUG WPS2SM:133 - Machter start: 40 +2016-04-11 09:08:04 DEBUG WPS2SM:134 - Machter end: 82 +2016-04-11 09:08:04 DEBUG WPS2SM:135 - Machter Group Count: 1 +2016-04-11 09:08:04 DEBUG WPS2SM:137 - Matcher referredTabularParameterName: OccurrencesTable +2016-04-11 09:08:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The column containing longitude values [the name of a column from OccurrencesTable] +2016-04-11 09:08:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:LongitudeColumn +2016-04-11 09:08:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:08:04 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=LongitudeColumn, description=The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallongitude], typology=COLUMN] +2016-04-11 09:08:04 DEBUG WPS2SM:254 - Conversion to SM Type->LatitudeColumn is a Literal Input +2016-04-11 09:08:04 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:08:04 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:08:04 DEBUG WPS2SM:101 - Guessed default value: decimallatitude +2016-04-11 09:08:04 DEBUG WPS2SM:130 - Machter title: The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallatitude] +2016-04-11 09:08:04 DEBUG WPS2SM:131 - Machter find: true +2016-04-11 09:08:04 DEBUG WPS2SM:132 - Machter group: the name of a column from OccurrencesTable +2016-04-11 09:08:04 DEBUG WPS2SM:133 - Machter start: 39 +2016-04-11 09:08:04 DEBUG WPS2SM:134 - Machter end: 81 +2016-04-11 09:08:04 DEBUG WPS2SM:135 - Machter Group Count: 1 +2016-04-11 09:08:04 DEBUG WPS2SM:137 - Matcher referredTabularParameterName: OccurrencesTable +2016-04-11 09:08:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The column containing latitude values [the name of a column from OccurrencesTable] +2016-04-11 09:08:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:LatitudeColumn +2016-04-11 09:08:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:08:04 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=LatitudeColumn, description=The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallatitude], typology=COLUMN] +2016-04-11 09:08:04 DEBUG WPS2SM:254 - Conversion to SM Type->XResolution is a Literal Input +2016-04-11 09:08:04 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:08:04 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-11 09:08:04 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:08:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Model projection resolution on the X axis in decimal degrees +2016-04-11 09:08:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:XResolution +2016-04-11 09:08:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:08:04 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=XResolution, description=Model projection resolution on the X axis in decimal degrees [Min N. of Entries:1; Max N. 
of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:08:04 DEBUG WPS2SM:254 - Conversion to SM Type->YResolution is a Literal Input +2016-04-11 09:08:04 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:08:04 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-11 09:08:04 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:08:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Model projection resolution on the Y axis in decimal degrees +2016-04-11 09:08:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:YResolution +2016-04-11 09:08:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:08:04 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=YResolution, description=Model projection resolution on the Y axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:08:04 DEBUG WPS2SM:254 - Conversion to SM Type->Layers is a Literal Input +2016-04-11 09:08:04 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:08:04 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:08:04 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:08:04 DEBUG WPS2SM:147 - Machter title: The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:08:04 DEBUG WPS2SM:148 - Machter find: true +2016-04-11 09:08:04 DEBUG WPS2SM:149 - Machter group: a sequence of values separated by | +2016-04-11 09:08:04 DEBUG WPS2SM:150 - Machter start: 501 +2016-04-11 09:08:04 DEBUG WPS2SM:151 - Machter end: 536 +2016-04-11 09:08:04 DEBUG WPS2SM:152 - Machter Group Count: 1 +2016-04-11 09:08:04 DEBUG WPS2SM:155 - Matcher separator: | +2016-04-11 09:08:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) +2016-04-11 09:08:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Layers +2016-04-11 09:08:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:08:04 DEBUG SClient4WPS:645 - InputParameter: ListParameter [type=java.lang.String, value=null, separator=|, name=Layers, description=The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. 
of Entries:1], typology=LIST] +2016-04-11 09:08:04 DEBUG WPS2SM:254 - Conversion to SM Type->Z is a Literal Input +2016-04-11 09:08:04 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:08:04 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-11 09:08:04 DEBUG WPS2SM:101 - Guessed default value: 0 +2016-04-11 09:08:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer +2016-04-11 09:08:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Z +2016-04-11 09:08:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:08:04 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=0, value=null, name=Z, description=Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT] +2016-04-11 09:08:04 DEBUG WPS2SM:254 - Conversion to SM Type->TimeIndex is a Literal Input +2016-04-11 09:08:04 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:08:04 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:08:04 DEBUG WPS2SM:101 - Guessed default value: 0 +2016-04-11 09:08:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Time Index. The default is the first time indexed in the input environmental datasets +2016-04-11 09:08:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:TimeIndex +2016-04-11 09:08:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:08:04 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex, description=Time Index. The default is the first time indexed in the input environmental datasets [Min N. of Entries:1; Max N. 
of Entries:1; default:0], typology=OBJECT] +2016-04-11 09:08:04 DEBUG SClient4WPS:649 - Parameters: [ObjectParameter [type=java.lang.String, defaultValue=maxent_, value=null, name=OutputTableLabel, description=The name of the table to produce [Min N. of Entries:1; Max N. of Entries:1; default:maxent_], typology=OBJECT], ObjectParameter [type=java.lang.String, defaultValue=generic_species, value=null, name=SpeciesName, description=The name of the species to model and the occurrence records refer to [Min N. of Entries:1; Max N. of Entries:1; default:generic_species], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1000, value=null, name=MaxIterations, description=The number of learning iterations of the MaxEnt algorithm [Min N. of Entries:1; Max N. of Entries:1; default:1000], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=0.5, value=null, name=DefaultPrevalence, description=A priori probability of presence at ordinary occurrence points [Min N. of Entries:1; Max N. of Entries:1; default:0.5], typology=OBJECT], TabularParameter [tableName= , templates=[], name=OccurrencesTable, description=A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=LongitudeColumn, description=The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallongitude], typology=COLUMN], Parameter [name=LatitudeColumn, description=The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallatitude], typology=COLUMN], ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=XResolution, description=Model projection resolution on the X axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=YResolution, description=Model projection resolution on the Y axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ListParameter [type=java.lang.String, value=null, separator=|, name=Layers, description=The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1], typology=LIST], ObjectParameter [type=java.lang.Double, defaultValue=0, value=null, name=Z, description=Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex, description=Time Index. The default is the first time indexed in the input environmental datasets [Min N. of Entries:1; Max N. 
of Entries:1; default:0], typology=OBJECT]] +2016-04-11 09:08:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:08:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:08:05 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:08:05 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:08:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:08:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:08:05 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:08:05 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:08:05 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:08:05 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:08:05 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:08:05 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:08:05 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:08:05 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-11 09:08:05 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:08:05 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:08:05 DEBUG JCRRepository:271 - Initialize repository +2016-04-11 09:08:05 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-11 09:08:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-11 09:08:05 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:08:05 INFO ICClient:75 - executing 
query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-11 09:08:05 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 16 ms +2016-04-11 09:08:05 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-11 09:08:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:08:05 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:08:05 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:08:05 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-11 09:08:05 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:08:05 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:08:05 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:08:05 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:08:05 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-11 09:08:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:08:05 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:08:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:08:05 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:08:05 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:08:05 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:08:05 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:08:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:08:05 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:08:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:08:05 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:08:05 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:08:05 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:08:05 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:08:05 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:08:05 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:08:05 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-11 09:08:05 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:08:05 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:08:05 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-11 09:08:05 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-11 09:08:05 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:08:05 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:08:05 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:08:05 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:08:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:08:05 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 
09:08:05 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:08:05 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:08:05 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:08:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:08:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:08:05 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:08:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:08:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:08:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:08:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:08:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:08:05 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:08:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:08:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:08:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:08:06 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:08:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:08:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:08:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:08:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath 
/Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:08:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:08:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:08:06 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:08:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-11 09:08:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:08:06 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:08:06 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:08:06 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 31 ms +2016-04-11 09:08:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-11 09:08:06 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:08:06 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where 
($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-11 09:08:06 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 22 ms +2016-04-11 09:08:06 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-11 09:08:06 INFO ISClientConnector:82 - found only one RR, take it +2016-04-11 09:08:06 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-11 09:08:06 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-11 09:08:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-11 09:08:06 DEBUG StorageClient:517 - set scope: /gcube +2016-04-11 09:08:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-11 09:08:06 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:08:06 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:08:06 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 18 ms +2016-04-11 09:08:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in 
thread 32 +2016-04-11 09:08:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-11 09:08:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-11 09:08:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:08:06 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:08:06 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:08:06 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:08:06 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:08:06 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:08:06 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:08:06 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:08:06 INFO WorkspaceExplorerServiceImpl:142 - end time - 409 msc 0 sec +2016-04-11 09:08:06 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:08:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:08:13 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:08:13 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:08:13 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY +2016-04-11 09:08:13 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:08:13 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:08:13 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY + CMSY + An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. 
The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner. + + + IDsFile + Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK + Name of the parameter: IDsFile. Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK + + + + + + + StocksFile + Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. Example: http://goo.gl/Mp2ZLY + Name of the parameter: StocksFile. Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. Example: http://goo.gl/Mp2ZLY + + + + + + + SelectedStock + The stock on which the procedure has to focus e.g. HLH_M07 + Name of the parameter: SelectedStock. The stock on which the procedure has to focus e.g. HLH_M07 + + + + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:08:13 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:08:13 DEBUG SClient4WPS:290 - WPSClient->Input: + IDsFile + Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK + Name of the parameter: IDsFile. Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK + + + + + +2016-04-11 09:08:13 DEBUG SClient4WPS:290 - WPSClient->Input: + StocksFile + Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. Example: http://goo.gl/Mp2ZLY + Name of the parameter: StocksFile. Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. 
Example: http://goo.gl/Mp2ZLY + + + + + +2016-04-11 09:08:13 DEBUG SClient4WPS:290 - WPSClient->Input: + SelectedStock + The stock on which the procedure has to focus e.g. HLH_M07 + Name of the parameter: SelectedStock. The stock on which the procedure has to focus e.g. HLH_M07 + + + + + +2016-04-11 09:08:13 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:08:13 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:08:13 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:08:13 DEBUG WPS2SM:254 - Conversion to SM Type->IDsFile is a Literal Input +2016-04-11 09:08:13 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:08:13 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:08:13 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:08:13 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK +2016-04-11 09:08:13 DEBUG WPS2SM:291 - Conversion to SM Type->Name:IDsFile +2016-04-11 09:08:13 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:08:13 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue= , value=null, name=IDsFile, description=Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK [Min N. of Entries:1; Max N. 
of Entries:1], typology=OBJECT] +2016-04-11 09:08:13 DEBUG WPS2SM:254 - Conversion to SM Type->StocksFile is a Literal Input +2016-04-11 09:08:13 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:08:13 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:08:13 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:08:13 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. Example: http://goo.gl/Mp2ZLY +2016-04-11 09:08:13 DEBUG WPS2SM:291 - Conversion to SM Type->Name:StocksFile +2016-04-11 09:08:13 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:08:13 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue= , value=null, name=StocksFile, description=Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. Example: http://goo.gl/Mp2ZLY [Min N. of Entries:1; Max N. of Entries:1], typology=OBJECT] +2016-04-11 09:08:13 DEBUG WPS2SM:254 - Conversion to SM Type->SelectedStock is a Literal Input +2016-04-11 09:08:13 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:08:13 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:08:13 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:08:13 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The stock on which the procedure has to focus e.g. HLH_M07 +2016-04-11 09:08:13 DEBUG WPS2SM:291 - Conversion to SM Type->Name:SelectedStock +2016-04-11 09:08:13 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:08:13 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue= , value=null, name=SelectedStock, description=The stock on which the procedure has to focus e.g. HLH_M07 [Min N. of Entries:1; Max N. 
of Entries:1], typology=OBJECT] +2016-04-11 09:08:13 DEBUG SClient4WPS:649 - Parameters: [ObjectParameter [type=java.lang.String, defaultValue= , value=null, name=IDsFile, description=Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK [Min N. of Entries:1; Max N. of Entries:1], typology=OBJECT], ObjectParameter [type=java.lang.String, defaultValue= , value=null, name=StocksFile, description=Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. Example: http://goo.gl/Mp2ZLY [Min N. of Entries:1; Max N. of Entries:1], typology=OBJECT], ObjectParameter [type=java.lang.String, defaultValue= , value=null, name=SelectedStock, description=The stock on which the procedure has to focus e.g. HLH_M07 [Min N. of Entries:1; Max N. of Entries:1], typology=OBJECT]] +2016-04-11 09:08:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:08:51 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:09:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:09:46 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:10:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:10:41 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:11:04 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-11 09:11:04 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-11 09:11:04 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-11 09:11:04 INFO SessionUtil:49 - no user found in session, use test user +2016-04-11 09:11:04 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-11 09:11:04 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:11:04 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-11 09:11:04 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@21fabc2 +2016-04-11 09:11:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:11:04 INFO ASLSession:352 - Logging the entrance +2016-04-11 09:11:04 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-11 09:11:04 DEBUG TemplateModel:83 - 2016-04-11 09:11:04, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-11 09:11:04 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:11:04 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-11 09:11:08 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-11 09:11:08 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-11 09:11:08 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-11 09:11:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:11:08 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:11:08 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:11:08 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:11:08 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 120 ms +2016-04-11 09:11:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-11 09:11:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-11 09:11:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-11 09:11:08 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-11 09:11:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-11 09:11:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-11 09:11:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-11 09:11:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-11 09:11:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-11 09:11:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-11 09:11:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-11 09:11:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-11 09:11:08 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-11 09:11:08 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-11 09:11:08 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-11 09:11:08 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:11:08 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:11:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@7f5f1414 +2016-04-11 09:11:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@f11384f +2016-04-11 09:11:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@26c07cae +2016-04-11 09:11:08 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@7cca6b6b +2016-04-11 09:11:08 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 117 ms +2016-04-11 09:11:09 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-11 09:11:09 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-11 09:11:09 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-11 09:11:09 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-11 09:11:09 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-11 09:11:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:11:09 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:11:09 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-11 09:11:09 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-11 09:11:09 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-11 09:11:09 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:11:09 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-11 09:11:09 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:11:09 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:11:09 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-11 09:11:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:11:14 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:11:14 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:11:14 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-11 09:11:14 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:11:14 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:11:14 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:11:14 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:11:14 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:11:14 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:11:14 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:11:14 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-11 09:11:14 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-11 09:11:15 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:11:15 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:11:15 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:11:15 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:11:15 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:11:15 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:11:15 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:11:15 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:11:15 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:11:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:11:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:11:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:11:15 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:11:15 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:11:15 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:11:15 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:11:15 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:11:15 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:11:15 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:11:15 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:11:15 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:11:15 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:11:15 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:11:15 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:11:15 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:11:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:11:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:11:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:11:15 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:11:15 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:11:15 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:11:15 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:11:15 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:11:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:11:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:11:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:11:15 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:11:15 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-11 09:11:15 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:11:15 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:11:15 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:11:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-11 09:11:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-11 09:11:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:11:15 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:11:15 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:11:15 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:11:15 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:11:15 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:11:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-11 09:11:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:11:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:11:15 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:11:15 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-11 09:11:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:11:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:11:15 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:11:15 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:11:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:11:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:11:15 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:11:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:11:15 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:11:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:11:15 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:11:15 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:11:15 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:11:15 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:11:15 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:11:15 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-11 09:11:15 DEBUG JCRRepository:271 - Initialize repository +2016-04-11 09:11:15 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-11 09:11:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-11 09:11:15 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:11:15 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-11 09:11:15 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 17 ms +2016-04-11 09:11:15 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-11 09:11:15 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:11:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:11:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:11:15 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-11 09:11:15 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:11:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:11:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:11:15 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:11:15 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-11 09:11:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:11:15 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:11:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:11:15 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:11:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:11:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:11:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:11:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:11:15 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:11:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:11:15 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:11:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:11:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:11:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:11:15 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:11:15 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:11:15 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-11 09:11:15 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:11:15 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:11:15 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-11 09:11:15 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-11 09:11:15 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:11:15 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:11:15 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:11:15 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:11:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:11:15 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:11:15 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:11:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:11:15 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:11:15 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:11:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:11:15 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:11:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-11 09:11:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:11:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:11:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:11:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:11:16 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:11:16 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:11:16 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:11:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:11:16 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:11:16 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:11:16 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:11:16 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:11:16 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:11:16 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:11:16 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:11:16 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:11:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-11 09:11:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 
09:11:16 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:11:16 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:11:16 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 22 ms +2016-04-11 09:11:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-11 09:11:16 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:11:16 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-11 09:11:16 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-11 09:11:16 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-11 09:11:16 INFO ISClientConnector:82 - found only one RR, take it +2016-04-11 09:11:16 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-11 09:11:16 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-11 09:11:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-11 09:11:16 DEBUG StorageClient:517 - set scope: /gcube +2016-04-11 09:11:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-11 09:11:16 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:11:16 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:11:16 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 22 ms +2016-04-11 09:11:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-11 09:11:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-11 09:11:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-11 09:11:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:11:16 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:11:16 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:11:16 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:11:16 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-11 09:11:16 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:11:16 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:11:16 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:11:16 INFO WorkspaceExplorerServiceImpl:142 - end time - 420 msc 0 sec +2016-04-11 09:11:16 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:11:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:11:22 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:11:22 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:11:22 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF +2016-04-11 09:11:22 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:11:22 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:11:22 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF + LOF + Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed. + + + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + + + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + + + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + + + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + + + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + + + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:11:22 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:11:22 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:11:22 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + +2016-04-11 09:11:22 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + +2016-04-11 09:11:22 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. 
locality (usually called k): minimal number of nearest neighbors + + + + 2 + + +2016-04-11 09:11:22 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + +2016-04-11 09:11:22 DEBUG SClient4WPS:290 - WPSClient->Input: + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + +2016-04-11 09:11:22 DEBUG SClient4WPS:290 - WPSClient->Input: + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + +2016-04-11 09:11:22 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:11:22 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:11:22 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:11:22 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:11:22 DEBUG WPS2SM:279 - Conversion to SM Type->PointsTable is a Complex Input +2016-04-11 09:11:22 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:11:22 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:11:22 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:11:22 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:11:22 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:11:22 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsTable +2016-04-11 09:11:22 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:11:22 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-11 09:11:22 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:11:22 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:11:22 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:11:22 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:11:22 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:11:22 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:11:22 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from PointsTable separated by | +2016-04-11 09:11:22 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:11:22 DEBUG WPS2SM:115 - Machter end: 93 +2016-04-11 09:11:22 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:11:22 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: PointsTable +2016-04-11 09:11:22 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:11:22 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from PointsTable separated by | ] +2016-04-11 09:11:22 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:11:22 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:11:22 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:11:22 DEBUG WPS2SM:254 - Conversion to SM Type->PointsClusterLabel is a Literal Input +2016-04-11 09:11:22 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:11:22 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:11:22 DEBUG WPS2SM:101 - Guessed default value: Cluster_ +2016-04-11 09:11:22 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:11:22 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsClusterLabel +2016-04-11 09:11:22 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:11:22 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT] +2016-04-11 09:11:22 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_lower_bound is a Literal Input +2016-04-11 09:11:22 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:11:22 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:11:22 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:11:22 DEBUG WPS2SM:290 - Conversion to SM Type->Title:locality (usually called k): minimal number of nearest neighbors +2016-04-11 09:11:22 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_lower_bound +2016-04-11 09:11:22 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:11:22 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. of Entries:1; Max N. 
of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:11:22 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_upper_bound is a Literal Input +2016-04-11 09:11:22 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:11:22 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:11:22 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:11:22 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of nearest neighbors to take into account for outliers evaluation +2016-04-11 09:11:22 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_upper_bound +2016-04-11 09:11:22 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:11:22 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:11:22 DEBUG WPS2SM:254 - Conversion to SM Type->distance_function is a Literal Input +2016-04-11 09:11:22 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:11:22 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:11:22 DEBUG WPS2SM:101 - Guessed default value: euclidian distance +2016-04-11 09:11:22 DEBUG WPS2SM:265 - ValueType[]:[euclidian distance, squared distance, cosine distance, inverted cosine distance, angle] +2016-04-11 09:11:22 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the distance function to use in the calculation +2016-04-11 09:11:22 DEBUG WPS2SM:291 - Conversion to SM Type->Name:distance_function +2016-04-11 09:11:22 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:11:22 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. 
of Entries:1; default:euclidian distance], typology=ENUM] +2016-04-11 09:11:22 DEBUG WPS2SM:254 - Conversion to SM Type->lof_threshold is a Literal Input +2016-04-11 09:11:22 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:11:22 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:11:22 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:11:22 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the LOF score threshold over which the point is an outlier (usually 2) +2016-04-11 09:11:22 DEBUG WPS2SM:291 - Conversion to SM Type->Name:lof_threshold +2016-04-11 09:11:22 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:11:22 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:11:22 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. 
of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. of Entries:1; default:euclidian distance], typology=ENUM], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:11:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:11:22 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:11:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:11:22 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:11:22 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:11:22 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:11:22 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:11:22 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:11:22 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:11:22 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:11:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:11:22 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:11:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:11:22 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:11:22 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:11:22 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:11:22 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:11:22 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:11:22 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:11:22 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:11:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:11:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:11:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:11:22 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:11:22 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:11:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:11:22 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:11:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:11:22 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:11:22 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:11:22 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:11:22 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:11:22 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:11:22 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:11:22 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:11:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:11:22 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:11:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:11:22 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:11:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:11:22 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:11:22 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:11:22 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:11:22 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:11:22 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:11:22 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:11:22 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:11:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:11:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:11:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:11:22 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:11:22 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:11:22 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:11:22 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:11:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:11:22 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:11:22 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:11:23 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:11:23 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:11:23 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:11:23 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 
09:11:23 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:11:23 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:11:23 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:11:23 INFO WorkspaceExplorerServiceImpl:142 - end time - 215 msc 0 sec +2016-04-11 09:11:23 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:11:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:11:59 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:12:30 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-11 09:12:30 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-11 09:12:30 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-11 09:12:30 INFO SessionUtil:49 - no user found in session, use test user +2016-04-11 09:12:30 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-11 09:12:30 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:12:30 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-11 09:12:30 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@59338efe +2016-04-11 09:12:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:12:30 INFO ASLSession:352 - Logging the entrance +2016-04-11 09:12:30 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-11 09:12:30 DEBUG TemplateModel:83 - 2016-04-11 09:12:30, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-11 09:12:30 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:12:30 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-11 09:12:34 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-11 09:12:34 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-11 09:12:34 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-11 09:12:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:12:34 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:12:34 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:12:34 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:12:34 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 130 ms +2016-04-11 
09:12:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-11 09:12:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-11 09:12:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-11 09:12:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-11 09:12:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-11 09:12:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-11 09:12:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-11 09:12:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-11 09:12:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-11 09:12:34 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-11 09:12:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-11 09:12:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-11 09:12:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-11 09:12:34 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-11 09:12:34 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-11 09:12:34 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:12:34 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-11 09:12:34 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@4529e005 +2016-04-11 09:12:34 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@bf203c0 +2016-04-11 09:12:34 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@6c371bc8 +2016-04-11 09:12:34 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@1248613b +2016-04-11 09:12:34 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 100 ms +2016-04-11 09:12:34 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-11 09:12:34 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-11 09:12:34 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-11 09:12:34 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-11 09:12:34 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-11 09:12:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:12:34 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:12:34 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-11 09:12:35 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 33 ms +2016-04-11 09:12:35 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-11 09:12:35 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:12:35 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-11 09:12:35 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:12:35 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:12:35 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-11 09:12:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:12:38 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:12:38 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:12:38 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-11 09:12:38 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:12:38 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:12:39 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. 
a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:12:39 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:12:39 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:12:39 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:12:39 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:12:39 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-11 09:12:39 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-11 09:12:39 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-11 09:12:39 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-11 09:12:39 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:12:39 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:12:39 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:12:39 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:12:39 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:12:39 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:12:39 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:12:39 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:12:39 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:12:39 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:12:39 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:12:39 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:39 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:12:39 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:12:39 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:12:39 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:12:39 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:12:39 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:12:39 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:12:39 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:12:39 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:12:39 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:12:39 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:12:39 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:12:39 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:12:39 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:12:39 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:12:39 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:39 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:12:39 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:12:39 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:12:39 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:12:39 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:12:39 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:12:39 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:12:39 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:39 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:12:39 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-11 09:12:39 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:12:39 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:12:39 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-11 09:12:39 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-11 09:12:39 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-11 09:12:39 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:39 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-11 09:12:39 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-11 09:12:39 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:12:39 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:12:39 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:12:39 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-11 09:12:39 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-11 09:12:39 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:39 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:12:39 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-11 09:12:39 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:12:39 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:12:39 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-11 09:12:39 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-11 09:12:39 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-11 09:12:39 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:39 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-11 09:12:39 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:12:39 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:12:39 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:12:39 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:12:39 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-11 09:12:39 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:12:39 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:39 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:12:39 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:12:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:12:39 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:12:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:12:39 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:12:39 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:12:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:12:39 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:12:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:12:39 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:12:39 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:12:39 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:12:39 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:12:39 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:12:39 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:12:39 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:12:39 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-11 09:12:39 DEBUG JCRRepository:271 - Initialize repository +2016-04-11 09:12:39 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-11 09:12:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-11 09:12:39 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:12:39 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-11 09:12:39 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 17 ms +2016-04-11 09:12:39 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-11 09:12:39 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:12:39 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:12:39 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:12:39 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:12:39 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:12:39 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-11 09:12:39 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:12:39 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:12:39 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-11 09:12:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:12:39 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:12:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:12:39 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:12:39 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:12:39 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:12:39 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:12:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:12:39 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:12:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:12:39 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:12:39 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:12:39 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:12:39 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:12:40 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:12:40 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:12:40 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-11 09:12:40 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:12:40 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:12:40 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-11 09:12:40 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-11 09:12:40 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:12:40 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:12:40 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:12:40 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:12:40 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:12:40 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:12:40 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:12:40 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:12:40 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:40 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:40 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:12:40 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:40 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-11 09:12:40 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:40 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:12:40 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:40 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:40 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:40 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:12:40 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:12:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:12:40 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:12:40 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:40 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:40 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:40 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:40 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:40 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:12:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:12:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 
09:12:40 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:12:40 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:12:40 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 32 ms +2016-04-11 09:12:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:12:40 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:12:40 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-11 09:12:40 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-11 09:12:40 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-11 09:12:40 INFO ISClientConnector:82 - found only one RR, take it +2016-04-11 09:12:40 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-11 09:12:40 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-11 09:12:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:12:40 DEBUG StorageClient:517 - set scope: /gcube +2016-04-11 09:12:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:12:40 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:12:40 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:12:40 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-11 09:12:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:12:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:12:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:12:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:12:40 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:12:40 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:12:40 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:12:40 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-11 09:12:40 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:12:40 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:12:40 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:12:40 INFO WorkspaceExplorerServiceImpl:142 - end time - 441 msc 0 sec +2016-04-11 09:12:40 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:12:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:12:44 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:12:44 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:12:44 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF +2016-04-11 09:12:44 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:12:44 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:12:44 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF + LOF + Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed. + + + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + + + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + + + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + + + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + + + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + + + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:12:44 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:12:44 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:12:44 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + +2016-04-11 09:12:44 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + +2016-04-11 09:12:44 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. 
locality (usually called k): minimal number of nearest neighbors + + + + 2 + + +2016-04-11 09:12:44 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + +2016-04-11 09:12:44 DEBUG SClient4WPS:290 - WPSClient->Input: + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + +2016-04-11 09:12:44 DEBUG SClient4WPS:290 - WPSClient->Input: + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + +2016-04-11 09:12:44 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:12:44 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:12:44 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:12:44 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:12:44 DEBUG WPS2SM:279 - Conversion to SM Type->PointsTable is a Complex Input +2016-04-11 09:12:44 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:12:44 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:12:44 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:12:44 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:12:44 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:12:44 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsTable +2016-04-11 09:12:44 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:44 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-11 09:12:44 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:12:44 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:12:44 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:12:44 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:12:44 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:12:44 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:12:44 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from PointsTable separated by | +2016-04-11 09:12:44 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:12:44 DEBUG WPS2SM:115 - Machter end: 93 +2016-04-11 09:12:44 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:12:44 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: PointsTable +2016-04-11 09:12:44 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:12:44 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from PointsTable separated by | ] +2016-04-11 09:12:44 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:12:44 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:44 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:12:44 DEBUG WPS2SM:254 - Conversion to SM Type->PointsClusterLabel is a Literal Input +2016-04-11 09:12:44 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:12:44 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:12:44 DEBUG WPS2SM:101 - Guessed default value: Cluster_ +2016-04-11 09:12:44 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:12:44 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsClusterLabel +2016-04-11 09:12:44 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:44 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT] +2016-04-11 09:12:44 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_lower_bound is a Literal Input +2016-04-11 09:12:44 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:12:44 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:12:44 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:12:44 DEBUG WPS2SM:290 - Conversion to SM Type->Title:locality (usually called k): minimal number of nearest neighbors +2016-04-11 09:12:44 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_lower_bound +2016-04-11 09:12:44 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:44 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. of Entries:1; Max N. 
of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:12:44 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_upper_bound is a Literal Input +2016-04-11 09:12:44 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:12:44 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:12:44 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:12:44 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of nearest neighbors to take into account for outliers evaluation +2016-04-11 09:12:44 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_upper_bound +2016-04-11 09:12:44 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:44 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:12:44 DEBUG WPS2SM:254 - Conversion to SM Type->distance_function is a Literal Input +2016-04-11 09:12:44 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:12:44 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:12:44 DEBUG WPS2SM:101 - Guessed default value: euclidian distance +2016-04-11 09:12:44 DEBUG WPS2SM:265 - ValueType[]:[euclidian distance, squared distance, cosine distance, inverted cosine distance, angle] +2016-04-11 09:12:44 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the distance function to use in the calculation +2016-04-11 09:12:44 DEBUG WPS2SM:291 - Conversion to SM Type->Name:distance_function +2016-04-11 09:12:44 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:44 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. 
of Entries:1; default:euclidian distance], typology=ENUM] +2016-04-11 09:12:44 DEBUG WPS2SM:254 - Conversion to SM Type->lof_threshold is a Literal Input +2016-04-11 09:12:44 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:12:44 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:12:44 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:12:44 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the LOF score threshold over which the point is an outlier (usually 2) +2016-04-11 09:12:44 DEBUG WPS2SM:291 - Conversion to SM Type->Name:lof_threshold +2016-04-11 09:12:44 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:44 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:12:44 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. 
of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. of Entries:1; default:euclidian distance], typology=ENUM], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:12:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:12:44 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:12:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:12:44 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:12:44 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:12:44 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:12:44 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:12:44 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:12:44 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:12:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:12:44 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:12:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:12:44 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:12:44 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:12:44 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:12:44 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:12:44 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:12:44 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:12:44 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:12:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:12:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:12:44 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:12:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:12:44 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:12:44 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:12:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:12:44 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:12:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:12:44 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:12:44 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:12:44 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:12:44 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:12:44 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:12:44 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:12:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:12:44 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:12:44 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:12:44 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:12:44 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:12:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:44 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:12:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:12:44 DEBUG ASLSession:458 - Getting security token: null in thread 36 
+2016-04-11 09:12:44 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:44 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:44 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:12:44 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:12:44 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:12:44 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:12:44 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:12:44 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:12:44 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:12:44 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:12:44 INFO WorkspaceExplorerServiceImpl:142 - end time - 207 msc 0 sec +2016-04-11 09:12:44 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:12:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:12:46 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:12:46 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:12:46 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS +2016-04-11 09:12:46 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:12:46 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:12:47 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS + 
XMEANS + A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + + + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + + + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. 
number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:12:47 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:12:47 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:12:47 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:12:47 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. 
table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:12:47 DEBUG SClient4WPS:290 - WPSClient->Input: + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + +2016-04-11 09:12:47 DEBUG SClient4WPS:290 - WPSClient->Input: + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + +2016-04-11 09:12:47 DEBUG SClient4WPS:290 - WPSClient->Input: + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + +2016-04-11 09:12:47 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-11 09:12:47 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:12:47 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:12:47 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:12:47 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:12:47 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:12:47 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:12:47 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:12:47 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:12:47 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:12:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:12:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:12:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:47 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-11 09:12:47 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:12:47 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:12:47 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:12:47 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:12:47 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:12:47 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:12:47 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:12:47 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:12:47 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:12:47 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:12:47 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:12:47 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:12:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:12:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:12:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:47 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:12:47 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:12:47 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:12:47 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:12:47 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:12:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:12:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:12:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:47 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:12:47 DEBUG WPS2SM:254 - Conversion to SM Type->maxIterations is a Literal Input +2016-04-11 09:12:47 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:12:47 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:12:47 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:12:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:XMeans max number of overall iterations of the clustering learning +2016-04-11 09:12:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxIterations +2016-04-11 09:12:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:47 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:12:47 DEBUG WPS2SM:254 - Conversion to SM Type->minClusters is a Literal Input +2016-04-11 09:12:47 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:12:47 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:12:47 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:12:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:minimum number of expected clusters +2016-04-11 09:12:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minClusters +2016-04-11 09:12:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:47 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:12:47 DEBUG WPS2SM:254 - Conversion to SM Type->maxClusters is a Literal Input +2016-04-11 09:12:47 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:12:47 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:12:47 DEBUG WPS2SM:101 - Guessed default value: 50 +2016-04-11 09:12:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of clusters to produce +2016-04-11 09:12:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxClusters +2016-04-11 09:12:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:47 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. 
of Entries:1; default:50], typology=OBJECT] +2016-04-11 09:12:47 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:12:47 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:12:47 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:12:47 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:12:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-11 09:12:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:12:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:47 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:12:47 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. of Entries:1; default:50], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:12:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:12:47 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:12:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:12:47 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:12:47 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:12:47 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:12:47 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:12:47 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:12:47 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:12:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:12:47 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:12:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:12:47 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:12:47 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:12:47 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:12:47 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:12:47 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:12:47 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:12:47 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:12:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:12:47 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:12:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:12:47 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:12:47 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:12:47 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:12:47 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:12:47 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:12:47 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:12:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:12:47 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:12:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:12:47 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:12:47 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:12:47 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:12:47 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:12:47 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:12:47 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:12:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:12:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:12:47 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:12:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:12:47 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:12:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:47 INFO 
JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:47 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:12:47 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:12:47 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:12:47 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:12:47 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:12:47 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:12:47 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:12:47 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:12:47 INFO WorkspaceExplorerServiceImpl:142 - end time - 189 msc 0 sec +2016-04-11 09:12:47 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:12:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:12:50 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:12:50 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:12:50 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-11 09:12:50 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:12:50 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:12:50 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. 
A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:12:50 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:12:50 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:12:50 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:12:50 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:12:50 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-11 09:12:50 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-11 09:12:50 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:12:50 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:12:50 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:12:50 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:12:50 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:12:50 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:12:50 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:12:50 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:12:50 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:12:50 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:12:50 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:12:50 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:50 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:12:50 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:12:50 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:12:50 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:12:50 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:12:50 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:12:50 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:12:50 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:12:50 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:12:50 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:12:50 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:12:50 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:12:50 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:12:50 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:12:50 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:12:50 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:50 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:12:50 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:12:50 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:12:50 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:12:50 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:12:50 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:12:50 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:12:50 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:50 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:12:50 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-11 09:12:50 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:12:50 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:12:50 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:12:50 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-11 09:12:50 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-11 09:12:50 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:50 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:12:50 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:12:50 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:12:50 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:12:50 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:12:50 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-11 09:12:50 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:12:50 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:12:50 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:12:50 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-11 09:12:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:12:50 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:12:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:12:50 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:12:50 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:12:50 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:12:50 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:12:50 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:12:50 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:12:50 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:12:50 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:12:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:12:50 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:12:50 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:12:50 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:12:50 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:12:50 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:12:50 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:12:50 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:12:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:12:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:12:50 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:50 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:12:50 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:12:50 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:12:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:12:50 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:12:50 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:12:50 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:12:50 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:12:50 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:12:50 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:12:50 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:12:50 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:12:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:12:50 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:12:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:50 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:12:50 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:12:50 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:12:50 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:12:50 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:12:50 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:50 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:12:50 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:12:50 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:12:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:50 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:12:50 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:12:50 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:12:50 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:12:50 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:12:50 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 
09:12:50 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:12:50 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:12:50 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:12:50 INFO WorkspaceExplorerServiceImpl:142 - end time - 176 msc 0 sec +2016-04-11 09:12:50 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:13:35 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-11 09:13:35 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-11 09:13:35 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-11 09:13:35 INFO SessionUtil:49 - no user found in session, use test user +2016-04-11 09:13:35 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-11 09:13:35 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:13:35 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-11 09:13:35 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@26cd16d +2016-04-11 09:13:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:13:35 INFO ASLSession:352 - Logging the entrance +2016-04-11 09:13:35 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-11 09:13:35 DEBUG TemplateModel:83 - 2016-04-11 09:13:35, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-11 09:13:35 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:13:35 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-11 09:13:39 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-11 09:13:39 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-11 09:13:39 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-11 09:13:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:13:39 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:13:39 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:13:39 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:13:39 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 121 ms +2016-04-11 
09:13:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-11 09:13:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-11 09:13:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-11 09:13:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-11 09:13:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-11 09:13:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-11 09:13:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-11 09:13:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-11 09:13:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-11 09:13:39 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-11 09:13:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-11 09:13:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-11 09:13:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-11 09:13:39 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-11 09:13:39 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-11 09:13:39 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:13:39 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-11 09:13:39 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@15e48c20 +2016-04-11 09:13:39 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@660b8e05 +2016-04-11 09:13:39 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@1ceac987 +2016-04-11 09:13:39 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@1b99aed1 +2016-04-11 09:13:39 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 92 ms +2016-04-11 09:13:39 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-11 09:13:39 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-11 09:13:39 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-11 09:13:39 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-11 09:13:39 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-11 09:13:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:13:39 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:13:39 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-11 09:13:39 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-11 09:13:39 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-11 09:13:39 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:13:39 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-11 09:13:39 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:13:40 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:13:40 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-11 09:13:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:13:43 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:13:43 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:13:43 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-11 09:13:43 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:13:43 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:13:44 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:13:44 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:13:44 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:13:44 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:13:44 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:13:44 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-11 09:13:44 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-11 09:13:44 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:13:44 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:13:44 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:13:44 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:13:44 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:13:44 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:13:44 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:13:44 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:13:44 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:13:44 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:13:44 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:13:44 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:13:44 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:13:44 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:13:44 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:13:44 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:13:44 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:13:44 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:13:44 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:13:44 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:13:44 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:13:44 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:13:44 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:13:44 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:13:44 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:13:44 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:13:44 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:13:44 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:13:44 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:13:44 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:13:44 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:13:44 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:13:44 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:13:44 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:13:44 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:13:44 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:13:44 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:13:44 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-11 09:13:44 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:13:44 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:13:44 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:13:44 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-11 09:13:44 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-11 09:13:44 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:13:44 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:13:44 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:13:44 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:13:44 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:13:44 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:13:44 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-11 09:13:44 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:13:44 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:13:44 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:13:44 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-11 09:13:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:13:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:13:44 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:13:44 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:13:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:13:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:13:44 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:13:44 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:13:44 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:13:44 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:13:44 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:13:44 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:13:44 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:13:44 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-11 09:13:44 DEBUG JCRRepository:271 - Initialize repository +2016-04-11 09:13:44 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:13:44 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:13:44 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-11 09:13:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-11 09:13:44 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:13:44 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-11 09:13:44 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 17 ms +2016-04-11 09:13:44 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-11 09:13:44 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:13:44 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:13:44 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:13:44 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:13:44 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-11 09:13:44 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:13:44 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:13:44 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:13:44 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-11 09:13:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:13:44 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:13:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:13:44 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:13:44 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:13:44 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:13:44 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:13:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:13:44 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:13:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:13:44 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:13:44 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:13:44 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:13:44 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:13:45 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:13:45 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:13:45 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-11 09:13:45 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:13:45 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:13:45 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-11 09:13:45 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-11 09:13:45 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:13:45 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:13:45 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:13:45 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:13:45 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:13:45 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:13:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:13:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:13:45 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:13:45 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:13:45 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:13:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:13:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-11 09:13:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:13:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:13:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:13:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:13:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:13:45 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:13:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:13:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:13:45 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:13:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:13:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:13:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:13:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:13:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:13:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:13:45 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:13:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-11 09:13:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 
09:13:45 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:13:45 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:13:45 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 27 ms +2016-04-11 09:13:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-11 09:13:45 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:13:45 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-11 09:13:45 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-11 09:13:45 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-11 09:13:45 INFO ISClientConnector:82 - found only one RR, take it +2016-04-11 09:13:45 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-11 09:13:45 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-11 09:13:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-11 09:13:45 DEBUG StorageClient:517 - set scope: /gcube +2016-04-11 09:13:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-11 09:13:45 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:13:45 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:13:45 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 15 ms +2016-04-11 09:13:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-11 09:13:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-11 09:13:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-11 09:13:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:13:45 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:13:45 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:13:45 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:13:45 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-11 09:13:45 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:13:45 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:13:45 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:13:45 INFO WorkspaceExplorerServiceImpl:142 - end time - 435 msc 0 sec +2016-04-11 09:13:45 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:14:30 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:14:30 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:14:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:14:48 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:14:48 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:14:48 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-11 09:14:48 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:14:48 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:14:49 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. 
Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:14:49 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:14:49 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:14:49 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:14:49 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:14:49 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-11 09:14:49 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-11 09:14:49 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-11 09:14:49 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-11 09:14:49 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:14:49 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:14:49 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:14:49 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:14:49 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:14:49 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:14:49 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:14:49 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:14:49 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:14:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:14:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:14:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:14:49 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:14:49 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:14:49 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:14:49 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:14:49 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:14:49 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:14:49 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:14:49 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:14:49 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:14:49 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:14:49 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:14:49 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:14:49 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:14:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:14:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:14:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:14:49 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:14:49 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:14:49 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:14:49 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:14:49 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:14:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:14:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:14:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:14:49 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:14:49 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-11 09:14:49 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:14:49 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:14:49 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-11 09:14:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-11 09:14:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-11 09:14:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:14:49 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-11 09:14:49 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-11 09:14:49 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:14:49 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:14:49 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:14:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-11 09:14:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-11 09:14:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:14:49 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:14:49 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-11 09:14:49 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:14:49 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:14:49 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-11 09:14:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-11 09:14:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-11 09:14:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:14:49 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-11 09:14:49 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:14:49 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:14:49 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:14:49 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:14:49 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-11 09:14:49 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:14:49 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:14:49 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:14:49 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:14:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:14:49 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:14:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:14:49 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:14:49 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:14:49 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:14:49 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:14:49 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:14:49 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:14:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:14:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:14:49 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:14:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:14:49 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:14:49 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:14:49 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:14:49 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:14:49 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:14:49 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:14:49 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:14:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:14:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:14:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:14:49 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:14:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:14:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:14:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:14:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:14:49 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:14:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:14:49 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:14:49 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:14:49 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:14:49 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:14:49 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:14:49 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:14:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:14:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:14:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:14:49 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:14:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:14:49 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:14:49 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:14:49 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:14:49 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:14:49 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:14:49 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:14:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:14:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:14:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:14:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:14:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:14:49 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:14:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:14:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:14:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:14:49 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:14:49 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:14:49 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:14:49 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:14:49 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:14:49 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:14:49 DEBUG ItemBuilder:108 - Is shared folder: 
StatisticalAlgorithmsImporter +2016-04-11 09:14:49 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:14:49 INFO WorkspaceExplorerServiceImpl:142 - end time - 200 msc 0 sec +2016-04-11 09:14:49 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:15:25 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-11 09:15:25 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:15:25 WARN SessionCheckerServiceImpl:68 - Stopping session polling as i think you are in development mode +2016-04-11 09:15:31 INFO SessionUtil:49 - no user found in session, use test user +2016-04-11 09:15:31 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-11 09:15:31 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:15:31 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-11 09:15:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:15:31 INFO ASLSession:352 - Logging the entrance +2016-04-11 09:15:31 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-11 09:15:31 DEBUG TemplateModel:83 - 2016-04-11 09:15:31, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-11 09:15:31 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:15:31 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-11 09:15:34 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-11 09:15:34 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-11 09:15:34 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-11 09:15:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:15:34 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:15:34 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:15:34 INFO DiscoveryDelegate:77 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
(cached) +2016-04-11 09:15:34 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-11 09:15:34 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-11 09:15:34 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-11 09:15:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:15:34 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:15:34 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-11 09:15:34 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 23 ms +2016-04-11 09:15:34 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-11 09:15:34 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:15:34 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-11 09:15:34 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:15:34 INFO StatWPSClientSession:133 - service removed successfully: 
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:15:34 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. 
an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. 
The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. 
The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-11 09:15:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:15:38 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:15:38 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:15:38 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-11 09:15:38 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:15:38 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:15:38 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:15:38 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:15:38 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:15:38 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:15:38 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:15:38 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-11 09:15:38 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-11 09:15:38 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:15:38 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:15:38 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:15:38 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:15:38 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:15:38 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:15:38 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:15:38 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:15:38 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:15:38 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:15:38 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:15:38 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:38 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:15:38 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:15:38 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:38 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:15:38 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:15:38 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:15:38 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:15:38 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:15:38 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:15:38 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:15:38 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:15:38 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:15:38 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:15:38 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:15:38 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:15:38 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:38 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:15:38 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:15:38 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:38 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:15:38 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:15:38 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:15:38 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:15:38 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:38 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:15:38 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-11 09:15:38 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:38 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:15:38 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:15:38 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-11 09:15:38 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-11 09:15:38 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:38 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:15:38 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:15:38 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:38 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:15:38 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:15:38 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-11 09:15:38 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:15:38 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:38 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:15:38 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-11 09:15:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:15:38 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:15:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:15:38 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:38 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:38 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:38 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:38 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:38 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:15:38 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:15:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:15:38 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:38 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:38 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:38 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:38 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:38 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:38 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:15:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:15:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:15:38 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:15:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:15:38 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:15:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:15:38 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:38 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:38 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:38 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:38 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:15:38 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:15:38 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:15:38 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:38 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:38 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:38 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:38 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:38 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:38 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:15:38 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:15:38 INFO JCRWorkspace:315 - Getting Workspace 
of user: giancarlo.panichi +2016-04-11 09:15:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:38 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:15:38 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:15:38 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:15:38 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:15:38 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:15:38 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:15:38 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:15:38 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:15:38 INFO WorkspaceExplorerServiceImpl:142 - end time - 198 msc 0 sec +2016-04-11 09:15:38 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:15:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:15:42 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:15:42 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:15:42 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-11 09:15:42 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:15:42 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:15:42 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies 
on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:15:42 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:15:42 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:15:42 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:15:42 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:15:42 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-11 09:15:42 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-11 09:15:42 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-11 09:15:42 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-11 09:15:42 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:15:42 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:15:42 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:15:42 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:15:42 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:15:42 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:15:42 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:15:42 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:15:42 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:15:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:15:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:15:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:42 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:15:42 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:15:42 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:42 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:15:42 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:15:42 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:15:42 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:15:42 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:15:42 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:15:42 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:15:42 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:15:42 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:15:42 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:15:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:15:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:15:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:42 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:15:42 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:15:42 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:42 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:15:42 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:15:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:15:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:15:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:42 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:15:42 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-11 09:15:42 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:42 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:15:42 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-11 09:15:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-11 09:15:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-11 09:15:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:42 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-11 09:15:42 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-11 09:15:42 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:42 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:15:42 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:15:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-11 09:15:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-11 09:15:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:42 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:15:42 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-11 09:15:42 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:42 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:15:42 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-11 09:15:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-11 09:15:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-11 09:15:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:42 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-11 09:15:42 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:15:42 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:42 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:15:42 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:15:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-11 09:15:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:15:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:42 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:15:42 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:15:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:15:42 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:15:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:15:42 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:42 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:42 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:42 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:42 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:42 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:15:42 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:15:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:15:42 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:42 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:42 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:42 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:42 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:42 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:42 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:15:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:15:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:15:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:43 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:15:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:15:43 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:15:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:15:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:15:43 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:15:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:15:43 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:43 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:43 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:43 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:43 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:43 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:43 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:43 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:43 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:43 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:43 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:43 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:43 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:15:43 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:15:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:43 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi 
+2016-04-11 09:15:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:43 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:15:43 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:15:43 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:15:43 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:15:43 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:15:43 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:15:43 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:15:43 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:15:43 INFO WorkspaceExplorerServiceImpl:142 - end time - 162 msc 0 sec +2016-04-11 09:15:43 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:15:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:15:45 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:15:45 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:15:45 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF +2016-04-11 09:15:45 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:15:45 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:15:45 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF + LOF + Local Outlier Factor (LOF). 
A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed. + + + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + + + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + + + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + + + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + + + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. 
the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + + + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:15:45 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:15:45 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:15:45 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. 
column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + +2016-04-11 09:15:45 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + +2016-04-11 09:15:45 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + +2016-04-11 09:15:45 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + +2016-04-11 09:15:45 DEBUG SClient4WPS:290 - WPSClient->Input: + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + +2016-04-11 09:15:45 DEBUG SClient4WPS:290 - WPSClient->Input: + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + +2016-04-11 09:15:45 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:15:45 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:15:45 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:15:45 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:15:45 DEBUG WPS2SM:279 - Conversion to SM Type->PointsTable is a Complex Input +2016-04-11 09:15:45 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:15:45 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:15:45 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:15:45 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:15:45 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:15:45 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsTable +2016-04-11 09:15:45 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:45 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-11 09:15:45 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:15:45 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:45 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:15:45 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:15:45 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:15:45 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:15:45 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from PointsTable separated by | +2016-04-11 09:15:45 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:15:45 DEBUG WPS2SM:115 - Machter end: 93 +2016-04-11 09:15:45 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:15:45 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: PointsTable +2016-04-11 09:15:45 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:15:45 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from PointsTable separated by | ] +2016-04-11 09:15:45 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:15:45 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:45 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:15:45 DEBUG WPS2SM:254 - Conversion to SM Type->PointsClusterLabel is a Literal Input +2016-04-11 09:15:45 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:45 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:15:45 DEBUG WPS2SM:101 - Guessed default value: Cluster_ +2016-04-11 09:15:45 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:15:45 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsClusterLabel +2016-04-11 09:15:45 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:45 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT] +2016-04-11 09:15:45 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_lower_bound is a Literal Input +2016-04-11 09:15:45 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:45 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:15:45 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:15:45 DEBUG WPS2SM:290 - Conversion to SM Type->Title:locality (usually called k): minimal number of nearest neighbors +2016-04-11 09:15:45 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_lower_bound +2016-04-11 09:15:45 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:45 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. of Entries:1; Max N. 
of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:15:45 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_upper_bound is a Literal Input +2016-04-11 09:15:45 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:45 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:15:45 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:15:45 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of nearest neighbors to take into account for outliers evaluation +2016-04-11 09:15:45 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_upper_bound +2016-04-11 09:15:45 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:45 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:15:45 DEBUG WPS2SM:254 - Conversion to SM Type->distance_function is a Literal Input +2016-04-11 09:15:45 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:45 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:15:45 DEBUG WPS2SM:101 - Guessed default value: euclidian distance +2016-04-11 09:15:45 DEBUG WPS2SM:265 - ValueType[]:[euclidian distance, squared distance, cosine distance, inverted cosine distance, angle] +2016-04-11 09:15:45 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the distance function to use in the calculation +2016-04-11 09:15:45 DEBUG WPS2SM:291 - Conversion to SM Type->Name:distance_function +2016-04-11 09:15:45 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:45 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. 
of Entries:1; default:euclidian distance], typology=ENUM] +2016-04-11 09:15:45 DEBUG WPS2SM:254 - Conversion to SM Type->lof_threshold is a Literal Input +2016-04-11 09:15:45 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:45 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:15:45 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:15:45 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the LOF score threshold over which the point is an outlier (usually 2) +2016-04-11 09:15:45 DEBUG WPS2SM:291 - Conversion to SM Type->Name:lof_threshold +2016-04-11 09:15:45 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:45 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:15:45 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. 
of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. of Entries:1; default:euclidian distance], typology=ENUM], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:15:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:15:45 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:15:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:15:45 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:45 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:45 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:45 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:45 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:45 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:15:45 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:15:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:15:45 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:45 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:45 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:45 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:45 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:45 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:45 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:15:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:15:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:15:45 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:15:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:15:45 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:15:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:15:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:15:45 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:15:45 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:15:45 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:45 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:45 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:45 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:45 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:45 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:45 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:45 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:45 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:45 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:45 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:45 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:45 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:15:46 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:15:46 INFO JCRWorkspace:315 - Getting Workspace of user: 
giancarlo.panichi +2016-04-11 09:15:46 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:46 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:46 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:46 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:15:46 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:15:46 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:15:46 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:15:46 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:15:46 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:15:46 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:15:46 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:15:46 INFO WorkspaceExplorerServiceImpl:142 - end time - 158 msc 0 sec +2016-04-11 09:15:46 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:15:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:15:48 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:15:48 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:15:48 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS +2016-04-11 09:15:48 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:15:48 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:15:48 DEBUG SClient4WPS:284 - + 
org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS + XMEANS + A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + + + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + + + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. 
number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:15:48 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:15:48 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:15:48 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:15:48 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. 
table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:15:48 DEBUG SClient4WPS:290 - WPSClient->Input: + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + +2016-04-11 09:15:48 DEBUG SClient4WPS:290 - WPSClient->Input: + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + +2016-04-11 09:15:48 DEBUG SClient4WPS:290 - WPSClient->Input: + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + +2016-04-11 09:15:48 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-11 09:15:48 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:15:48 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:15:48 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:15:48 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:15:48 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:15:48 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:15:48 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:15:48 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:15:48 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:15:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:15:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:15:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:48 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-11 09:15:48 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:15:48 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:48 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:15:48 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:15:48 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:15:48 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:15:48 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:15:48 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:15:48 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:15:48 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:15:48 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:15:48 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:15:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:15:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:15:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:48 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:15:48 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:15:48 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:48 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:15:48 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:15:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:15:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:15:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:15:48 DEBUG WPS2SM:254 - Conversion to SM Type->maxIterations is a Literal Input +2016-04-11 09:15:48 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:48 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:15:48 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:15:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:XMeans max number of overall iterations of the clustering learning +2016-04-11 09:15:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxIterations +2016-04-11 09:15:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:15:48 DEBUG WPS2SM:254 - Conversion to SM Type->minClusters is a Literal Input +2016-04-11 09:15:48 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:48 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:15:48 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:15:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:minimum number of expected clusters +2016-04-11 09:15:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minClusters +2016-04-11 09:15:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:15:48 DEBUG WPS2SM:254 - Conversion to SM Type->maxClusters is a Literal Input +2016-04-11 09:15:48 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:48 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:15:48 DEBUG WPS2SM:101 - Guessed default value: 50 +2016-04-11 09:15:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of clusters to produce +2016-04-11 09:15:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxClusters +2016-04-11 09:15:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. 
of Entries:1; default:50], typology=OBJECT] +2016-04-11 09:15:48 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:15:48 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:48 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:15:48 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:15:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-11 09:15:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:15:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:15:48 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. of Entries:1; default:50], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:15:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:15:48 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:15:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:15:48 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:48 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:15:48 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:15:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:15:48 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:48 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:48 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:15:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:15:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:15:48 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:15:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:15:48 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:15:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:15:48 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:48 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:15:48 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:15:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:15:48 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:48 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:48 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:48 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:48 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:48 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:15:48 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:15:48 
INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:48 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:48 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:48 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:15:49 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:15:49 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:15:49 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:15:49 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:15:49 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:15:49 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:15:49 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:15:49 INFO WorkspaceExplorerServiceImpl:142 - end time - 168 msc 0 sec +2016-04-11 09:15:49 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:15:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:15:52 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:15:52 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:15:52 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF +2016-04-11 09:15:52 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:15:52 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:15:52 DEBUG SClient4WPS:284 - + 
org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF + LOF + Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed. + + + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + + + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + + + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + + + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + + + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. 
the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + + + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:15:52 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:15:52 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:15:52 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. 
column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + +2016-04-11 09:15:52 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + +2016-04-11 09:15:52 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + +2016-04-11 09:15:52 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + +2016-04-11 09:15:52 DEBUG SClient4WPS:290 - WPSClient->Input: + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + +2016-04-11 09:15:52 DEBUG SClient4WPS:290 - WPSClient->Input: + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + +2016-04-11 09:15:52 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:15:52 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:15:52 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:15:52 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:15:52 DEBUG WPS2SM:279 - Conversion to SM Type->PointsTable is a Complex Input +2016-04-11 09:15:52 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:15:52 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:15:52 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:15:52 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:15:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:15:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsTable +2016-04-11 09:15:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:52 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-11 09:15:52 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:15:52 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:52 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:15:52 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:15:52 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:15:52 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:15:52 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from PointsTable separated by | +2016-04-11 09:15:52 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:15:52 DEBUG WPS2SM:115 - Machter end: 93 +2016-04-11 09:15:52 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:15:52 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: PointsTable +2016-04-11 09:15:52 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:15:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from PointsTable separated by | ] +2016-04-11 09:15:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:15:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:52 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:15:52 DEBUG WPS2SM:254 - Conversion to SM Type->PointsClusterLabel is a Literal Input +2016-04-11 09:15:52 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:52 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:15:52 DEBUG WPS2SM:101 - Guessed default value: Cluster_ +2016-04-11 09:15:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:15:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsClusterLabel +2016-04-11 09:15:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:52 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT] +2016-04-11 09:15:52 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_lower_bound is a Literal Input +2016-04-11 09:15:52 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:52 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:15:52 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:15:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:locality (usually called k): minimal number of nearest neighbors +2016-04-11 09:15:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_lower_bound +2016-04-11 09:15:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:52 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. of Entries:1; Max N. 
of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:15:52 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_upper_bound is a Literal Input +2016-04-11 09:15:52 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:52 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:15:52 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:15:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of nearest neighbors to take into account for outliers evaluation +2016-04-11 09:15:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_upper_bound +2016-04-11 09:15:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:52 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:15:52 DEBUG WPS2SM:254 - Conversion to SM Type->distance_function is a Literal Input +2016-04-11 09:15:52 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:52 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:15:52 DEBUG WPS2SM:101 - Guessed default value: euclidian distance +2016-04-11 09:15:52 DEBUG WPS2SM:265 - ValueType[]:[euclidian distance, squared distance, cosine distance, inverted cosine distance, angle] +2016-04-11 09:15:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the distance function to use in the calculation +2016-04-11 09:15:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:distance_function +2016-04-11 09:15:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:52 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. 
of Entries:1; default:euclidian distance], typology=ENUM] +2016-04-11 09:15:52 DEBUG WPS2SM:254 - Conversion to SM Type->lof_threshold is a Literal Input +2016-04-11 09:15:52 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:52 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:15:52 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:15:52 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the LOF score threshold over which the point is an outlier (usually 2) +2016-04-11 09:15:52 DEBUG WPS2SM:291 - Conversion to SM Type->Name:lof_threshold +2016-04-11 09:15:52 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:52 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:15:52 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. 
of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. of Entries:1; default:euclidian distance], typology=ENUM], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:15:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:15:52 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:15:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:15:52 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:52 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:52 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:52 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:52 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:52 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:15:52 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:15:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:15:52 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:52 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:52 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:52 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:52 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:52 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:52 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:15:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:15:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:15:52 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:15:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:15:52 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:15:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:15:52 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:52 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:52 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:15:52 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:15:52 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:52 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:15:52 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:52 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:52 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:52 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:52 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:52 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:52 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:52 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:52 DEBUG DefaultScopeProvider:38 - setting scope 
/gcube/devsec/devVRE in thread 31 +2016-04-11 09:15:52 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:15:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:52 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:52 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:53 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:15:53 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:15:53 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:15:53 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:15:53 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:15:53 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:15:53 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:15:53 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:15:53 INFO WorkspaceExplorerServiceImpl:142 - end time - 164 msc 0 sec +2016-04-11 09:15:53 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:15:54 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:15:54 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:15:54 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-11 09:15:54 DEBUG SClient4WPS:263 - Describe Process WPS URL: 
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:15:54 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:15:54 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. 
max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:15:54 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:15:54 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:15:54 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. 
column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:15:54 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:15:54 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-11 09:15:54 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + +2016-04-11 09:15:54 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-11 09:15:54 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-11 09:15:54 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:15:54 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:15:54 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:15:54 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:15:54 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:15:54 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:15:54 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:15:54 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:15:54 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:15:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:15:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:15:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:54 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-11 09:15:54 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:15:54 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:54 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:15:54 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:15:54 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:15:54 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:15:54 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:15:54 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:15:54 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:15:54 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:15:54 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:15:54 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:15:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:15:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:15:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:54 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:15:54 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:15:54 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:54 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:15:54 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:15:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:15:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:15:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:15:54 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-11 09:15:54 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:54 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:15:54 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-11 09:15:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-11 09:15:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-11 09:15:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-11 09:15:54 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-11 09:15:54 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:54 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:15:54 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:15:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-11 09:15:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-11 09:15:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:15:54 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-11 09:15:54 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:54 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:15:54 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-11 09:15:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-11 09:15:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-11 09:15:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-11 09:15:54 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:15:54 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:54 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:15:54 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:15:54 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-11 09:15:54 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:15:54 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:54 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:15:54 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:15:54 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:15:54 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:54 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:54 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:54 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:54 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:15:54 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:15:54 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:54 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:54 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:54 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:54 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:54 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:15:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:15:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:15:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:54 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:15:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:15:54 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:15:54 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:15:54 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:54 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:54 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:54 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:15:54 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:54 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:54 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:54 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:54 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:54 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:54 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:15:54 DEBUG ASLSession:458 - Getting security token: null in 
thread 36 +2016-04-11 09:15:54 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:54 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:54 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:15:54 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:15:54 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:15:54 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:15:54 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:15:54 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:15:54 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:15:54 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:15:54 INFO WorkspaceExplorerServiceImpl:142 - end time - 176 msc 0 sec +2016-04-11 09:15:54 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:15:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:15:56 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:15:56 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:15:56 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-11 09:15:56 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:15:56 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:15:56 DEBUG SClient4WPS:284 - + 
org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:15:56 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:15:56 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:15:56 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:15:56 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:15:56 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-11 09:15:56 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-11 09:15:56 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:15:56 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:15:56 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:15:56 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:15:56 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:15:56 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:15:56 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:15:56 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:15:56 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:15:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:15:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:15:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:56 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:15:56 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:15:56 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:56 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:15:56 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:15:56 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:15:56 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:15:56 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:15:56 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:15:56 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:15:56 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:15:56 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:15:56 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:15:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:15:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:15:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:56 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:15:56 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:15:56 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:56 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:15:56 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:15:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:15:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:15:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:56 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:15:56 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-11 09:15:56 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:56 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:15:56 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:15:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-11 09:15:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-11 09:15:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:56 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:15:56 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:15:56 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:15:56 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:15:56 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:15:56 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-11 09:15:56 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:15:56 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:15:56 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:15:56 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-11 09:15:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:15:56 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:15:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:15:56 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:15:56 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:15:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:15:56 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:56 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:15:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:15:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:15:56 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:15:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:15:56 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:15:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:15:56 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:15:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:15:56 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:15:56 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:15:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:15:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:15:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:15:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:15:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:15:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:15:56 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:15:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:15:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:56 INFO 
JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:15:56 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:15:56 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:15:56 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:15:56 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:15:56 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:15:56 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:15:56 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:15:56 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:15:56 INFO WorkspaceExplorerServiceImpl:142 - end time - 162 msc 0 sec +2016-04-11 09:15:56 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:16:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:16:00 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:16:00 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:16:00 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER +2016-04-11 09:16:00 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:16:00 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:16:00 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER + RASTER_DATA_PUBLISHER + This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. 
For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants. + + + DatasetTitle + Title of the geospatial dataset to be shown on GeoExplorer + Name of the parameter: DatasetTitle. Title of the geospatial dataset to be shown on GeoExplorer + + + + Generic Raster Layer + + + + DatasetAbstract + Abstract defining the content, the references and usage policies + Name of the parameter: DatasetAbstract. Abstract defining the content, the references and usage policies + + + + Abstract + + + + InnerLayerName + Name of the inner layer or band to be published as a Map (ignored for non-NetCDF files) + Name of the parameter: InnerLayerName. Name of the inner layer or band to be published as a Map (ignored for non-NetCDF files) + + + + band_1 + + + + FileNameOnInfra + Name of the file that will be created in the infrastructures + Name of the parameter: FileNameOnInfra. Name of the file that will be created in the infrastructures + + + + raster-1458666673377.nc + + + + RasterFile + Raster dataset to process + Name of the parameter: RasterFile. Raster dataset to process + + + + text/xml + + + + + text/xml + + + application/d4science + + + + + + Topics + Topics to be attached to the published dataset. E.g. Biodiversity, D4Science, Environment, Weather [a sequence of values separated by | ] (format: String) + Name of the parameter: Topics. Topics to be attached to the published dataset. E.g. Biodiversity, D4Science, Environment, Weather [a sequence of values separated by | ] (format: String) + + + + + + + SpatialResolution + The resolution of the layer. For NetCDF file this is automatically estimated by data (leave -1) + Name of the parameter: SpatialResolution. The resolution of the layer. 
For NetCDF file this is automatically estimated by data (leave -1) + + + + -1d + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:16:00 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:16:00 DEBUG SClient4WPS:290 - WPSClient->Input: + DatasetTitle + Title of the geospatial dataset to be shown on GeoExplorer + Name of the parameter: DatasetTitle. Title of the geospatial dataset to be shown on GeoExplorer + + + + Generic Raster Layer + + +2016-04-11 09:16:00 DEBUG SClient4WPS:290 - WPSClient->Input: + DatasetAbstract + Abstract defining the content, the references and usage policies + Name of the parameter: DatasetAbstract. Abstract defining the content, the references and usage policies + + + + Abstract + + +2016-04-11 09:16:00 DEBUG SClient4WPS:290 - WPSClient->Input: + InnerLayerName + Name of the inner layer or band to be published as a Map (ignored for non-NetCDF files) + Name of the parameter: InnerLayerName. Name of the inner layer or band to be published as a Map (ignored for non-NetCDF files) + + + + band_1 + + +2016-04-11 09:16:00 DEBUG SClient4WPS:290 - WPSClient->Input: + FileNameOnInfra + Name of the file that will be created in the infrastructures + Name of the parameter: FileNameOnInfra. Name of the file that will be created in the infrastructures + + + + raster-1458666673377.nc + + +2016-04-11 09:16:00 DEBUG SClient4WPS:290 - WPSClient->Input: + RasterFile + Raster dataset to process + Name of the parameter: RasterFile. Raster dataset to process + + + + text/xml + + + + + text/xml + + + application/d4science + + + + +2016-04-11 09:16:00 DEBUG SClient4WPS:290 - WPSClient->Input: + Topics + Topics to be attached to the published dataset. E.g. 
Biodiversity, D4Science, Environment, Weather [a sequence of values separated by | ] (format: String) + Name of the parameter: Topics. Topics to be attached to the published dataset. E.g. Biodiversity, D4Science, Environment, Weather [a sequence of values separated by | ] (format: String) + + + + + +2016-04-11 09:16:00 DEBUG SClient4WPS:290 - WPSClient->Input: + SpatialResolution + The resolution of the layer. For NetCDF file this is automatically estimated by data (leave -1) + Name of the parameter: SpatialResolution. The resolution of the layer. For NetCDF file this is automatically estimated by data (leave -1) + + + + -1d + + +2016-04-11 09:16:00 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:16:00 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:16:00 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:16:00 DEBUG WPS2SM:254 - Conversion to SM Type->DatasetTitle is a Literal Input +2016-04-11 09:16:00 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:16:00 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:16:00 DEBUG WPS2SM:101 - Guessed default value: Generic Raster Layer +2016-04-11 09:16:00 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Title of the geospatial dataset to be shown on GeoExplorer +2016-04-11 09:16:00 DEBUG WPS2SM:291 - Conversion to SM Type->Name:DatasetTitle +2016-04-11 09:16:00 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:16:00 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Generic Raster Layer, value=null, name=DatasetTitle, description=Title of the geospatial dataset 
to be shown on GeoExplorer [Min N. of Entries:1; Max N. of Entries:1; default:Generic Raster Layer], typology=OBJECT] +2016-04-11 09:16:00 DEBUG WPS2SM:254 - Conversion to SM Type->DatasetAbstract is a Literal Input +2016-04-11 09:16:00 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:16:00 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:16:00 DEBUG WPS2SM:101 - Guessed default value: Abstract +2016-04-11 09:16:00 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Abstract defining the content, the references and usage policies +2016-04-11 09:16:00 DEBUG WPS2SM:291 - Conversion to SM Type->Name:DatasetAbstract +2016-04-11 09:16:00 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:16:00 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Abstract, value=null, name=DatasetAbstract, description=Abstract defining the content, the references and usage policies [Min N. of Entries:1; Max N. of Entries:1; default:Abstract], typology=OBJECT] +2016-04-11 09:16:00 DEBUG WPS2SM:254 - Conversion to SM Type->InnerLayerName is a Literal Input +2016-04-11 09:16:00 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:16:00 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:16:00 DEBUG WPS2SM:101 - Guessed default value: band_1 +2016-04-11 09:16:00 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Name of the inner layer or band to be published as a Map (ignored for non-NetCDF files) +2016-04-11 09:16:00 DEBUG WPS2SM:291 - Conversion to SM Type->Name:InnerLayerName +2016-04-11 09:16:00 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:16:00 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=band_1, value=null, name=InnerLayerName, description=Name of the inner layer or band to be published as a Map (ignored for non-NetCDF files) [Min N. of Entries:1; Max N. 
of Entries:1; default:band_1], typology=OBJECT] +2016-04-11 09:16:00 DEBUG WPS2SM:254 - Conversion to SM Type->FileNameOnInfra is a Literal Input +2016-04-11 09:16:00 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:16:00 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:16:00 DEBUG WPS2SM:101 - Guessed default value: raster-1458666673377.nc +2016-04-11 09:16:00 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Name of the file that will be created in the infrastructures +2016-04-11 09:16:00 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FileNameOnInfra +2016-04-11 09:16:00 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:16:00 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=raster-1458666673377.nc, value=null, name=FileNameOnInfra, description=Name of the file that will be created in the infrastructures [Min N. of Entries:1; Max N. of Entries:1; default:raster-1458666673377.nc], typology=OBJECT] +2016-04-11 09:16:00 DEBUG WPS2SM:279 - Conversion to SM Type->RasterFile is a Complex Input +2016-04-11 09:16:00 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:16:00 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:16:00 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:16:00 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:16:00 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Raster dataset to process +2016-04-11 09:16:00 DEBUG WPS2SM:291 - Conversion to SM Type->Name:RasterFile +2016-04-11 09:16:00 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:16:00 DEBUG SClient4WPS:645 - InputParameter: FileParameter [mimeType=text/xml, value=null, name=RasterFile, description=Raster dataset to process [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=FILE] +2016-04-11 09:16:00 DEBUG WPS2SM:254 - Conversion to SM Type->Topics is a Literal Input +2016-04-11 09:16:00 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:16:00 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:16:00 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:16:00 DEBUG WPS2SM:147 - Machter title: Topics to be attached to the published dataset. E.g. Biodiversity, D4Science, Environment, Weather [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:16:00 DEBUG WPS2SM:148 - Machter find: true +2016-04-11 09:16:00 DEBUG WPS2SM:149 - Machter group: a sequence of values separated by | +2016-04-11 09:16:00 DEBUG WPS2SM:150 - Machter start: 100 +2016-04-11 09:16:00 DEBUG WPS2SM:151 - Machter end: 135 +2016-04-11 09:16:00 DEBUG WPS2SM:152 - Machter Group Count: 1 +2016-04-11 09:16:00 DEBUG WPS2SM:155 - Matcher separator: | +2016-04-11 09:16:00 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Topics to be attached to the published dataset. E.g. Biodiversity, D4Science, Environment, Weather [a sequence of values separated by | ] (format: String) +2016-04-11 09:16:00 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Topics +2016-04-11 09:16:00 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:16:00 DEBUG SClient4WPS:645 - InputParameter: ListParameter [type=java.lang.String, value=null, separator=|, name=Topics, description=Topics to be attached to the published dataset. E.g. Biodiversity, D4Science, Environment, Weather [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. 
of Entries:1], typology=LIST] +2016-04-11 09:16:00 DEBUG WPS2SM:254 - Conversion to SM Type->SpatialResolution is a Literal Input +2016-04-11 09:16:00 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:16:00 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-11 09:16:00 DEBUG WPS2SM:101 - Guessed default value: -1d +2016-04-11 09:16:00 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The resolution of the layer. For NetCDF file this is automatically estimated by data (leave -1) +2016-04-11 09:16:00 DEBUG WPS2SM:291 - Conversion to SM Type->Name:SpatialResolution +2016-04-11 09:16:00 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:16:00 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=-1d, value=null, name=SpatialResolution, description=The resolution of the layer. For NetCDF file this is automatically estimated by data (leave -1) [Min N. of Entries:1; Max N. of Entries:1; default:-1d], typology=OBJECT] +2016-04-11 09:16:00 DEBUG SClient4WPS:649 - Parameters: [ObjectParameter [type=java.lang.String, defaultValue=Generic Raster Layer, value=null, name=DatasetTitle, description=Title of the geospatial dataset to be shown on GeoExplorer [Min N. of Entries:1; Max N. of Entries:1; default:Generic Raster Layer], typology=OBJECT], ObjectParameter [type=java.lang.String, defaultValue=Abstract, value=null, name=DatasetAbstract, description=Abstract defining the content, the references and usage policies [Min N. of Entries:1; Max N. of Entries:1; default:Abstract], typology=OBJECT], ObjectParameter [type=java.lang.String, defaultValue=band_1, value=null, name=InnerLayerName, description=Name of the inner layer or band to be published as a Map (ignored for non-NetCDF files) [Min N. of Entries:1; Max N. 
of Entries:1; default:band_1], typology=OBJECT], ObjectParameter [type=java.lang.String, defaultValue=raster-1458666673377.nc, value=null, name=FileNameOnInfra, description=Name of the file that will be created in the infrastructures [Min N. of Entries:1; Max N. of Entries:1; default:raster-1458666673377.nc], typology=OBJECT], FileParameter [mimeType=text/xml, value=null, name=RasterFile, description=Raster dataset to process [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=FILE], ListParameter [type=java.lang.String, value=null, separator=|, name=Topics, description=Topics to be attached to the published dataset. E.g. Biodiversity, D4Science, Environment, Weather [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1], typology=LIST], ObjectParameter [type=java.lang.Double, defaultValue=-1d, value=null, name=SpatialResolution, description=The resolution of the layer. For NetCDF file this is automatically estimated by data (leave -1) [Min N. of Entries:1; Max N. of Entries:1; default:-1d], typology=OBJECT]] +2016-04-11 09:16:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:16:00 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:16:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:16:00 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:16:00 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:16:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:16:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:16:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:16:00 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:16:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:16:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:16:00 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:16:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:16:00 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:16:00 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:16:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:16:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:16:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:16:00 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:16:00 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:16:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:16:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:16:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:16:00 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:16:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 
09:16:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:16:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:16:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:16:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:16:00 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:16:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:16:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:16:00 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:16:00 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:16:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:16:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:16:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:16:00 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:16:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:16:00 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:16:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:16:00 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:16:00 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:16:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:16:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:16:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:16:00 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:16:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:16:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:16:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:16:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:16:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:16:00 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:16:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:16:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:16:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:16:00 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:16:00 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:16:00 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:16:00 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:16:00 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:16:00 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:16:00 DEBUG ItemBuilder:108 - Is shared folder: 
StatisticalAlgorithmsImporter +2016-04-11 09:16:00 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:16:00 INFO WorkspaceExplorerServiceImpl:142 - end time - 147 msc 0 sec +2016-04-11 09:16:00 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:16:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:16:12 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:16:12 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:16:12 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON +2016-04-11 09:16:12 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:16:12 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:16:12 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON + MAPS_COMPARISON + An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column. + + + Layer_1 + First Layer Title or UUID: The title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer + Name of the parameter: Layer_1. 
First Layer Title or UUID: The title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer + + + + + + + Layer_2 + Second Layer Title or UUID: The title or the UUID (preferred) of a second layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer + Name of the parameter: Layer_2. Second Layer Title or UUID: The title or the UUID (preferred) of a second layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer + + + + + + + Z + value of Z. Default is 0, that means comparison will be at surface level + Name of the parameter: Z. value of Z. Default is 0, that means comparison will be at surface level + + + + 0 + + + + ValuesComparisonThreshold + A comparison threshold for the values in the map. Null equals to 0.1 + Name of the parameter: ValuesComparisonThreshold. A comparison threshold for the values in the map. Null equals to 0.1 + + + + 0.1 + + + + TimeIndex_1 + First Layer Time Index. The default is the first + Name of the parameter: TimeIndex_1. First Layer Time Index. The default is the first + + + + 0 + + + + TimeIndex_2 + Second Layer Time Index. The default is the first + Name of the parameter: TimeIndex_2. Second Layer Time Index. The default is the first + + + + 0 + + + + KThreshold + Threshold for K-Statistic: over this threshold values will be considered 1 for agreement calculation. Default is 0.5 + Name of the parameter: KThreshold. Threshold for K-Statistic: over this threshold values will be considered 1 for agreement calculation. 
Default is 0.5 + + + + 0.5 + + + + + + Error Distribution + Error Distribution + Error Distribution + + + + image/png + + + + + image/png + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:16:12 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:16:12 DEBUG SClient4WPS:290 - WPSClient->Input: + Layer_1 + First Layer Title or UUID: The title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer + Name of the parameter: Layer_1. First Layer Title or UUID: The title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer + + + + + +2016-04-11 09:16:12 DEBUG SClient4WPS:290 - WPSClient->Input: + Layer_2 + Second Layer Title or UUID: The title or the UUID (preferred) of a second layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer + Name of the parameter: Layer_2. Second Layer Title or UUID: The title or the UUID (preferred) of a second layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer + + + + + +2016-04-11 09:16:12 DEBUG SClient4WPS:290 - WPSClient->Input: + Z + value of Z. Default is 0, that means comparison will be at surface level + Name of the parameter: Z. value of Z. Default is 0, that means comparison will be at surface level + + + + 0 + + +2016-04-11 09:16:12 DEBUG SClient4WPS:290 - WPSClient->Input: + ValuesComparisonThreshold + A comparison threshold for the values in the map. Null equals to 0.1 + Name of the parameter: ValuesComparisonThreshold. A comparison threshold for the values in the map. 
Null equals to 0.1 + + + + 0.1 + + +2016-04-11 09:16:12 DEBUG SClient4WPS:290 - WPSClient->Input: + TimeIndex_1 + First Layer Time Index. The default is the first + Name of the parameter: TimeIndex_1. First Layer Time Index. The default is the first + + + + 0 + + +2016-04-11 09:16:12 DEBUG SClient4WPS:290 - WPSClient->Input: + TimeIndex_2 + Second Layer Time Index. The default is the first + Name of the parameter: TimeIndex_2. Second Layer Time Index. The default is the first + + + + 0 + + +2016-04-11 09:16:12 DEBUG SClient4WPS:290 - WPSClient->Input: + KThreshold + Threshold for K-Statistic: over this threshold values will be considered 1 for agreement calculation. Default is 0.5 + Name of the parameter: KThreshold. Threshold for K-Statistic: over this threshold values will be considered 1 for agreement calculation. Default is 0.5 + + + + 0.5 + + +2016-04-11 09:16:12 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:16:12 DEBUG SClient4WPS:297 - WPSClient->Output: + Error Distribution + Error Distribution + Error Distribution + + + + image/png + + + + + image/png + + + + +2016-04-11 09:16:12 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:16:12 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:16:12 DEBUG WPS2SM:254 - Conversion to SM Type->Layer_1 is a Literal Input +2016-04-11 09:16:12 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:16:12 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:16:12 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:16:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:First Layer Title or UUID: The title or the UUID (preferred) of a layer 
indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer +2016-04-11 09:16:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Layer_1 +2016-04-11 09:16:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:16:12 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue= , value=null, name=Layer_1, description=First Layer Title or UUID: The title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer [Min N. of Entries:1; Max N. of Entries:1], typology=OBJECT] +2016-04-11 09:16:12 DEBUG WPS2SM:254 - Conversion to SM Type->Layer_2 is a Literal Input +2016-04-11 09:16:12 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:16:12 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:16:12 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:16:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Second Layer Title or UUID: The title or the UUID (preferred) of a second layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer +2016-04-11 09:16:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Layer_2 +2016-04-11 09:16:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:16:12 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue= , value=null, name=Layer_2, description=Second Layer Title or UUID: The title or the UUID (preferred) of a second layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer [Min N. of Entries:1; Max N. 
of Entries:1], typology=OBJECT] +2016-04-11 09:16:12 DEBUG WPS2SM:254 - Conversion to SM Type->Z is a Literal Input +2016-04-11 09:16:12 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:16:12 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:16:12 DEBUG WPS2SM:101 - Guessed default value: 0 +2016-04-11 09:16:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:value of Z. Default is 0, that means comparison will be at surface level +2016-04-11 09:16:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Z +2016-04-11 09:16:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:16:12 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=Z, description=value of Z. Default is 0, that means comparison will be at surface level [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT] +2016-04-11 09:16:12 DEBUG WPS2SM:254 - Conversion to SM Type->ValuesComparisonThreshold is a Literal Input +2016-04-11 09:16:12 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:16:12 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-11 09:16:12 DEBUG WPS2SM:101 - Guessed default value: 0.1 +2016-04-11 09:16:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:A comparison threshold for the values in the map. Null equals to 0.1 +2016-04-11 09:16:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:ValuesComparisonThreshold +2016-04-11 09:16:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:16:12 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=0.1, value=null, name=ValuesComparisonThreshold, description=A comparison threshold for the values in the map. Null equals to 0.1 [Min N. of Entries:1; Max N. 
of Entries:1; default:0.1], typology=OBJECT] +2016-04-11 09:16:12 DEBUG WPS2SM:254 - Conversion to SM Type->TimeIndex_1 is a Literal Input +2016-04-11 09:16:12 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:16:12 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:16:12 DEBUG WPS2SM:101 - Guessed default value: 0 +2016-04-11 09:16:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:First Layer Time Index. The default is the first +2016-04-11 09:16:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:TimeIndex_1 +2016-04-11 09:16:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:16:12 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex_1, description=First Layer Time Index. The default is the first [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT] +2016-04-11 09:16:12 DEBUG WPS2SM:254 - Conversion to SM Type->TimeIndex_2 is a Literal Input +2016-04-11 09:16:12 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:16:12 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:16:12 DEBUG WPS2SM:101 - Guessed default value: 0 +2016-04-11 09:16:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Second Layer Time Index. The default is the first +2016-04-11 09:16:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:TimeIndex_2 +2016-04-11 09:16:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:16:12 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex_2, description=Second Layer Time Index. The default is the first [Min N. of Entries:1; Max N. 
of Entries:1; default:0], typology=OBJECT] +2016-04-11 09:16:12 DEBUG WPS2SM:254 - Conversion to SM Type->KThreshold is a Literal Input +2016-04-11 09:16:12 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:16:12 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-11 09:16:12 DEBUG WPS2SM:101 - Guessed default value: 0.5 +2016-04-11 09:16:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Threshold for K-Statistic: over this threshold values will be considered 1 for agreement calculation. Default is 0.5 +2016-04-11 09:16:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:KThreshold +2016-04-11 09:16:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:16:12 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=0.5, value=null, name=KThreshold, description=Threshold for K-Statistic: over this threshold values will be considered 1 for agreement calculation. Default is 0.5 [Min N. of Entries:1; Max N. of Entries:1; default:0.5], typology=OBJECT] +2016-04-11 09:16:12 DEBUG SClient4WPS:649 - Parameters: [ObjectParameter [type=java.lang.String, defaultValue= , value=null, name=Layer_1, description=First Layer Title or UUID: The title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer [Min N. of Entries:1; Max N. of Entries:1], typology=OBJECT], ObjectParameter [type=java.lang.String, defaultValue= , value=null, name=Layer_2, description=Second Layer Title or UUID: The title or the UUID (preferred) of a second layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer [Min N. of Entries:1; Max N. of Entries:1], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=Z, description=value of Z. Default is 0, that means comparison will be at surface level [Min N. of Entries:1; Max N. 
of Entries:1; default:0], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=0.1, value=null, name=ValuesComparisonThreshold, description=A comparison threshold for the values in the map. Null equals to 0.1 [Min N. of Entries:1; Max N. of Entries:1; default:0.1], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex_1, description=First Layer Time Index. The default is the first [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex_2, description=Second Layer Time Index. The default is the first [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=0.5, value=null, name=KThreshold, description=Threshold for K-Statistic: over this threshold values will be considered 1 for agreement calculation. Default is 0.5 [Min N. of Entries:1; Max N. of Entries:1; default:0.5], typology=OBJECT]] +2016-04-11 09:16:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:16:26 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:16:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:16:57 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:16:57 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:16:57 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER +2016-04-11 09:16:57 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:16:57 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:16:57 DEBUG SClient4WPS:284 - + 
org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER + RASTER_DATA_PUBLISHER + This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants. + + + DatasetTitle + Title of the geospatial dataset to be shown on GeoExplorer + Name of the parameter: DatasetTitle. Title of the geospatial dataset to be shown on GeoExplorer + + + + Generic Raster Layer + + + + DatasetAbstract + Abstract defining the content, the references and usage policies + Name of the parameter: DatasetAbstract. Abstract defining the content, the references and usage policies + + + + Abstract + + + + InnerLayerName + Name of the inner layer or band to be published as a Map (ignored for non-NetCDF files) + Name of the parameter: InnerLayerName. Name of the inner layer or band to be published as a Map (ignored for non-NetCDF files) + + + + band_1 + + + + FileNameOnInfra + Name of the file that will be created in the infrastructures + Name of the parameter: FileNameOnInfra. Name of the file that will be created in the infrastructures + + + + raster-1458666673377.nc + + + + RasterFile + Raster dataset to process + Name of the parameter: RasterFile. Raster dataset to process + + + + text/xml + + + + + text/xml + + + application/d4science + + + + + + Topics + Topics to be attached to the published dataset. E.g. Biodiversity, D4Science, Environment, Weather [a sequence of values separated by | ] (format: String) + Name of the parameter: Topics. Topics to be attached to the published dataset. E.g. Biodiversity, D4Science, Environment, Weather [a sequence of values separated by | ] (format: String) + + + + + + + SpatialResolution + The resolution of the layer. 
For NetCDF file this is automatically estimated by data (leave -1) + Name of the parameter: SpatialResolution. The resolution of the layer. For NetCDF file this is automatically estimated by data (leave -1) + + + + -1d + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:16:57 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:16:57 DEBUG SClient4WPS:290 - WPSClient->Input: + DatasetTitle + Title of the geospatial dataset to be shown on GeoExplorer + Name of the parameter: DatasetTitle. Title of the geospatial dataset to be shown on GeoExplorer + + + + Generic Raster Layer + + +2016-04-11 09:16:57 DEBUG SClient4WPS:290 - WPSClient->Input: + DatasetAbstract + Abstract defining the content, the references and usage policies + Name of the parameter: DatasetAbstract. Abstract defining the content, the references and usage policies + + + + Abstract + + +2016-04-11 09:16:57 DEBUG SClient4WPS:290 - WPSClient->Input: + InnerLayerName + Name of the inner layer or band to be published as a Map (ignored for non-NetCDF files) + Name of the parameter: InnerLayerName. Name of the inner layer or band to be published as a Map (ignored for non-NetCDF files) + + + + band_1 + + +2016-04-11 09:16:57 DEBUG SClient4WPS:290 - WPSClient->Input: + FileNameOnInfra + Name of the file that will be created in the infrastructures + Name of the parameter: FileNameOnInfra. Name of the file that will be created in the infrastructures + + + + raster-1458666673377.nc + + +2016-04-11 09:16:57 DEBUG SClient4WPS:290 - WPSClient->Input: + RasterFile + Raster dataset to process + Name of the parameter: RasterFile. 
Raster dataset to process + + + + text/xml + + + + + text/xml + + + application/d4science + + + + +2016-04-11 09:16:57 DEBUG SClient4WPS:290 - WPSClient->Input: + Topics + Topics to be attached to the published dataset. E.g. Biodiversity, D4Science, Environment, Weather [a sequence of values separated by | ] (format: String) + Name of the parameter: Topics. Topics to be attached to the published dataset. E.g. Biodiversity, D4Science, Environment, Weather [a sequence of values separated by | ] (format: String) + + + + + +2016-04-11 09:16:57 DEBUG SClient4WPS:290 - WPSClient->Input: + SpatialResolution + The resolution of the layer. For NetCDF file this is automatically estimated by data (leave -1) + Name of the parameter: SpatialResolution. The resolution of the layer. For NetCDF file this is automatically estimated by data (leave -1) + + + + -1d + + +2016-04-11 09:16:57 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:16:57 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:16:57 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:16:57 DEBUG WPS2SM:254 - Conversion to SM Type->DatasetTitle is a Literal Input +2016-04-11 09:16:57 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:16:57 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:16:57 DEBUG WPS2SM:101 - Guessed default value: Generic Raster Layer +2016-04-11 09:16:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Title of the geospatial dataset to be shown on GeoExplorer +2016-04-11 09:16:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:DatasetTitle +2016-04-11 09:16:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of 
Inputs to Manage:1 +2016-04-11 09:16:57 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Generic Raster Layer, value=null, name=DatasetTitle, description=Title of the geospatial dataset to be shown on GeoExplorer [Min N. of Entries:1; Max N. of Entries:1; default:Generic Raster Layer], typology=OBJECT] +2016-04-11 09:16:57 DEBUG WPS2SM:254 - Conversion to SM Type->DatasetAbstract is a Literal Input +2016-04-11 09:16:57 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:16:57 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:16:57 DEBUG WPS2SM:101 - Guessed default value: Abstract +2016-04-11 09:16:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Abstract defining the content, the references and usage policies +2016-04-11 09:16:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:DatasetAbstract +2016-04-11 09:16:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:16:57 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Abstract, value=null, name=DatasetAbstract, description=Abstract defining the content, the references and usage policies [Min N. of Entries:1; Max N. 
of Entries:1; default:Abstract], typology=OBJECT] +2016-04-11 09:16:57 DEBUG WPS2SM:254 - Conversion to SM Type->InnerLayerName is a Literal Input +2016-04-11 09:16:57 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:16:57 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:16:57 DEBUG WPS2SM:101 - Guessed default value: band_1 +2016-04-11 09:16:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Name of the inner layer or band to be published as a Map (ignored for non-NetCDF files) +2016-04-11 09:16:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:InnerLayerName +2016-04-11 09:16:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:16:57 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=band_1, value=null, name=InnerLayerName, description=Name of the inner layer or band to be published as a Map (ignored for non-NetCDF files) [Min N. of Entries:1; Max N. of Entries:1; default:band_1], typology=OBJECT] +2016-04-11 09:16:57 DEBUG WPS2SM:254 - Conversion to SM Type->FileNameOnInfra is a Literal Input +2016-04-11 09:16:57 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:16:57 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:16:57 DEBUG WPS2SM:101 - Guessed default value: raster-1458666673377.nc +2016-04-11 09:16:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Name of the file that will be created in the infrastructures +2016-04-11 09:16:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FileNameOnInfra +2016-04-11 09:16:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:16:57 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=raster-1458666673377.nc, value=null, name=FileNameOnInfra, description=Name of the file that will be created in the infrastructures [Min N. of Entries:1; Max N. 
of Entries:1; default:raster-1458666673377.nc], typology=OBJECT] +2016-04-11 09:16:57 DEBUG WPS2SM:279 - Conversion to SM Type->RasterFile is a Complex Input +2016-04-11 09:16:57 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:16:57 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:16:57 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:16:57 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:16:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Raster dataset to process +2016-04-11 09:16:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:RasterFile +2016-04-11 09:16:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:16:57 DEBUG SClient4WPS:645 - InputParameter: FileParameter [mimeType=text/xml, value=null, name=RasterFile, description=Raster dataset to process [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=FILE] +2016-04-11 09:16:57 DEBUG WPS2SM:254 - Conversion to SM Type->Topics is a Literal Input +2016-04-11 09:16:57 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:16:57 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:16:57 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:16:57 DEBUG WPS2SM:147 - Machter title: Topics to be attached to the published dataset. E.g. Biodiversity, D4Science, Environment, Weather [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:16:57 DEBUG WPS2SM:148 - Machter find: true +2016-04-11 09:16:57 DEBUG WPS2SM:149 - Machter group: a sequence of values separated by | +2016-04-11 09:16:57 DEBUG WPS2SM:150 - Machter start: 100 +2016-04-11 09:16:57 DEBUG WPS2SM:151 - Machter end: 135 +2016-04-11 09:16:57 DEBUG WPS2SM:152 - Machter Group Count: 1 +2016-04-11 09:16:57 DEBUG WPS2SM:155 - Matcher separator: | +2016-04-11 09:16:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Topics to be attached to the published dataset. E.g. 
Biodiversity, D4Science, Environment, Weather [a sequence of values separated by | ] (format: String) +2016-04-11 09:16:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Topics +2016-04-11 09:16:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:16:57 DEBUG SClient4WPS:645 - InputParameter: ListParameter [type=java.lang.String, value=null, separator=|, name=Topics, description=Topics to be attached to the published dataset. E.g. Biodiversity, D4Science, Environment, Weather [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1], typology=LIST] +2016-04-11 09:16:57 DEBUG WPS2SM:254 - Conversion to SM Type->SpatialResolution is a Literal Input +2016-04-11 09:16:57 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:16:57 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-11 09:16:57 DEBUG WPS2SM:101 - Guessed default value: -1d +2016-04-11 09:16:57 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The resolution of the layer. For NetCDF file this is automatically estimated by data (leave -1) +2016-04-11 09:16:57 DEBUG WPS2SM:291 - Conversion to SM Type->Name:SpatialResolution +2016-04-11 09:16:57 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:16:57 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=-1d, value=null, name=SpatialResolution, description=The resolution of the layer. For NetCDF file this is automatically estimated by data (leave -1) [Min N. of Entries:1; Max N. of Entries:1; default:-1d], typology=OBJECT] +2016-04-11 09:16:57 DEBUG SClient4WPS:649 - Parameters: [ObjectParameter [type=java.lang.String, defaultValue=Generic Raster Layer, value=null, name=DatasetTitle, description=Title of the geospatial dataset to be shown on GeoExplorer [Min N. of Entries:1; Max N. 
of Entries:1; default:Generic Raster Layer], typology=OBJECT], ObjectParameter [type=java.lang.String, defaultValue=Abstract, value=null, name=DatasetAbstract, description=Abstract defining the content, the references and usage policies [Min N. of Entries:1; Max N. of Entries:1; default:Abstract], typology=OBJECT], ObjectParameter [type=java.lang.String, defaultValue=band_1, value=null, name=InnerLayerName, description=Name of the inner layer or band to be published as a Map (ignored for non-NetCDF files) [Min N. of Entries:1; Max N. of Entries:1; default:band_1], typology=OBJECT], ObjectParameter [type=java.lang.String, defaultValue=raster-1458666673377.nc, value=null, name=FileNameOnInfra, description=Name of the file that will be created in the infrastructures [Min N. of Entries:1; Max N. of Entries:1; default:raster-1458666673377.nc], typology=OBJECT], FileParameter [mimeType=text/xml, value=null, name=RasterFile, description=Raster dataset to process [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=FILE], ListParameter [type=java.lang.String, value=null, separator=|, name=Topics, description=Topics to be attached to the published dataset. E.g. Biodiversity, D4Science, Environment, Weather [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1], typology=LIST], ObjectParameter [type=java.lang.Double, defaultValue=-1d, value=null, name=SpatialResolution, description=The resolution of the layer. For NetCDF file this is automatically estimated by data (leave -1) [Min N. of Entries:1; Max N. of Entries:1; default:-1d], typology=OBJECT]] +2016-04-11 09:16:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:16:58 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:16:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:16:58 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:16:58 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:16:58 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:16:58 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:16:58 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:16:58 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:16:58 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:16:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:16:58 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:16:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:16:58 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:16:58 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:16:58 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:16:58 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:16:58 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:16:58 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:16:58 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:16:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:16:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:16:58 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:16:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:16:58 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 
09:16:58 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:16:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:16:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:16:58 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:16:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:16:58 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:16:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:16:58 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:16:58 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:16:58 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:16:58 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:16:58 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:16:58 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:16:58 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:16:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:16:58 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:16:58 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:16:58 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:16:58 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:16:58 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:16:58 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:16:58 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:16:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:16:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:16:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:16:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:16:58 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:16:58 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:16:58 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:16:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:16:58 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:16:58 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:16:58 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:16:58 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:16:58 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:16:58 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 
09:16:58 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:16:58 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:16:58 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:16:58 INFO WorkspaceExplorerServiceImpl:142 - end time - 185 msc 0 sec +2016-04-11 09:16:58 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:17:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:17:21 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:18:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:18:16 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:18:35 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-11 09:18:35 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-11 09:18:35 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-11 09:19:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:19:11 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:20:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:20:06 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:20:51 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-11 09:20:51 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-11 09:20:51 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-11 09:20:51 INFO SessionUtil:49 - no user found in session, use test user +2016-04-11 09:20:51 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-11 09:20:51 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:20:51 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-11 09:20:51 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@4079b806 +2016-04-11 09:20:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:20:51 INFO ASLSession:352 - Logging the entrance +2016-04-11 09:20:51 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-11 09:20:51 DEBUG TemplateModel:83 - 2016-04-11 09:20:51, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-11 09:20:51 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:20:51 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-11 09:20:54 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-11 09:20:54 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-11 09:20:54 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-11 09:20:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:20:54 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:20:54 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:20:54 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:20:54 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 123 ms +2016-04-11 09:20:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-11 09:20:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-11 09:20:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-11 09:20:54 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-11 09:20:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-11 09:20:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-11 09:20:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-11 09:20:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-11 09:20:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-11 09:20:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-11 09:20:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-11 09:20:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-11 09:20:54 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-11 09:20:54 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-11 09:20:54 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-11 09:20:54 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:20:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:20:54 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@306230e6 +2016-04-11 09:20:54 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@2899bb82 +2016-04-11 09:20:54 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@506660c2 +2016-04-11 09:20:54 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@3dbc562e +2016-04-11 09:20:54 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 89 ms +2016-04-11 09:20:54 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-11 09:20:54 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-11 09:20:54 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-11 09:20:54 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-11 09:20:54 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-11 09:20:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:20:54 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:20:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-11 09:20:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 27 ms +2016-04-11 09:20:54 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-11 09:20:54 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:20:54 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-11 09:20:54 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:20:55 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:20:55 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-11 09:20:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:20:59 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:20:59 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:20:59 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-11 09:20:59 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:20:59 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:20:59 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:20:59 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:20:59 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:20:59 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:20:59 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:20:59 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-11 09:20:59 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-11 09:20:59 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:20:59 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:20:59 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:20:59 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:20:59 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:20:59 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:20:59 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:20:59 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:20:59 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:20:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:20:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:20:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:20:59 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:20:59 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:20:59 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:20:59 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:20:59 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:20:59 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:20:59 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:20:59 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:20:59 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:20:59 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:20:59 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:20:59 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:20:59 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:20:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:20:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:20:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:20:59 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:20:59 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:20:59 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:20:59 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:20:59 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:20:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:20:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:20:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:20:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:20:59 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-11 09:20:59 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:20:59 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:20:59 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:20:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-11 09:20:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-11 09:20:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:20:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:20:59 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:20:59 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:20:59 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:20:59 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:20:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-11 09:20:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:20:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:20:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:20:59 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-11 09:20:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:20:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:20:59 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:20:59 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:20:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:20:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:20:59 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:20:59 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:20:59 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:20:59 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:20:59 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:20:59 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:20:59 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:20:59 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-11 09:20:59 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:20:59 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:20:59 DEBUG JCRRepository:271 - Initialize repository +2016-04-11 09:20:59 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-11 09:20:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-11 09:20:59 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:20:59 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-11 09:20:59 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-11 09:21:00 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-11 09:21:00 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:21:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:21:00 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:21:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:21:00 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-11 09:21:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:21:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:21:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:21:00 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-11 09:21:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:21:00 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:21:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:21:00 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:21:00 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:21:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:21:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:21:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:21:00 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:21:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:21:00 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:21:00 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:21:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:21:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:21:00 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:21:00 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:21:00 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-11 09:21:00 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:21:00 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:21:00 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-11 09:21:00 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-11 09:21:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:21:00 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:21:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:21:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:21:00 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:21:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:21:00 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:21:00 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:21:00 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:21:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-11 09:21:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:21:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:21:00 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:21:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:21:00 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:21:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:00 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:21:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:21:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 
09:21:00 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:21:00 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:21:00 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 32 ms +2016-04-11 09:21:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:21:00 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:21:00 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-11 09:21:00 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-11 09:21:00 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-11 09:21:00 INFO ISClientConnector:82 - found only one RR, take it +2016-04-11 09:21:00 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-11 09:21:00 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-11 09:21:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:21:00 DEBUG StorageClient:517 - set scope: /gcube +2016-04-11 09:21:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:21:00 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:21:00 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:21:00 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-11 09:21:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:21:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:21:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:21:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:21:00 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:21:00 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:21:00 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:21:00 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-11 09:21:00 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:21:00 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:21:00 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:21:00 INFO WorkspaceExplorerServiceImpl:142 - end time - 412 msc 0 sec +2016-04-11 09:21:00 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:21:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:21:04 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:21:04 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:21:04 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-11 09:21:04 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:21:04 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:21:04 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:21:04 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:21:04 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:21:04 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:21:04 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:21:04 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-11 09:21:04 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-11 09:21:04 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-11 09:21:04 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-11 09:21:04 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:21:04 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:21:04 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:21:04 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:21:04 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:21:04 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:21:04 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:21:04 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:21:04 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:21:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:21:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:21:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:04 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:21:04 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:21:04 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:21:04 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:21:04 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:21:04 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:21:04 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:21:04 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:21:04 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:21:04 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:21:04 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:21:04 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:21:04 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:21:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:21:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:21:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:04 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:21:04 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:21:04 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:21:04 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:21:04 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:21:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:21:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:21:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:04 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:21:04 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-11 09:21:04 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:21:04 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:21:04 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-11 09:21:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-11 09:21:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-11 09:21:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:04 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-11 09:21:04 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-11 09:21:04 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:21:04 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:21:04 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:21:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-11 09:21:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-11 09:21:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:04 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:21:04 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-11 09:21:04 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:21:04 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:21:04 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-11 09:21:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-11 09:21:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-11 09:21:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:04 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-11 09:21:04 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:21:04 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:21:04 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:21:04 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:21:04 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-11 09:21:04 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:21:04 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:04 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:21:04 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:21:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:21:04 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:21:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:21:04 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:21:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:21:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:21:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:21:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:21:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:21:04 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:21:04 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:21:04 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:21:04 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:21:04 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:21:04 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:21:04 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:21:04 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:21:04 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:21:04 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:21:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:04 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:21:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:21:05 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:21:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:21:05 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:21:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:21:05 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:21:05 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:21:05 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:21:05 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:21:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:21:05 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:21:05 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:21:05 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:21:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:21:05 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:21:05 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:21:05 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:21:05 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:21:05 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:21:05 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:21:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:05 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:21:05 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:21:05 INFO JCRWorkspace:315 - Getting Workspace 
of user: giancarlo.panichi +2016-04-11 09:21:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:05 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:05 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:21:05 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:21:05 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:21:05 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:21:05 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:21:05 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:21:05 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:21:05 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:21:05 INFO WorkspaceExplorerServiceImpl:142 - end time - 246 msc 0 sec +2016-04-11 09:21:05 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:21:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:21:08 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:21:08 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:21:08 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF +2016-04-11 09:21:08 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:21:08 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:21:08 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF + LOF + Local Outlier Factor (LOF). 
A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed. + + + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + + + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + + + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + + + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + + + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. 
the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + + + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:21:08 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:21:08 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:21:08 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. 
column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + +2016-04-11 09:21:08 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + +2016-04-11 09:21:08 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + +2016-04-11 09:21:08 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + +2016-04-11 09:21:08 DEBUG SClient4WPS:290 - WPSClient->Input: + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + +2016-04-11 09:21:08 DEBUG SClient4WPS:290 - WPSClient->Input: + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + +2016-04-11 09:21:08 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:21:08 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:21:08 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:21:08 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:21:08 DEBUG WPS2SM:279 - Conversion to SM Type->PointsTable is a Complex Input +2016-04-11 09:21:08 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:21:08 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:21:08 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:21:08 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:21:08 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:21:08 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsTable +2016-04-11 09:21:08 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:08 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-11 09:21:08 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:21:08 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:21:08 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:21:08 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:21:08 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:21:08 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:21:08 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from PointsTable separated by | +2016-04-11 09:21:08 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:21:08 DEBUG WPS2SM:115 - Machter end: 93 +2016-04-11 09:21:08 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:21:08 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: PointsTable +2016-04-11 09:21:08 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:21:08 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from PointsTable separated by | ] +2016-04-11 09:21:08 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:21:08 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:08 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:21:08 DEBUG WPS2SM:254 - Conversion to SM Type->PointsClusterLabel is a Literal Input +2016-04-11 09:21:08 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:21:08 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:21:08 DEBUG WPS2SM:101 - Guessed default value: Cluster_ +2016-04-11 09:21:08 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:21:08 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsClusterLabel +2016-04-11 09:21:08 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:08 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT] +2016-04-11 09:21:08 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_lower_bound is a Literal Input +2016-04-11 09:21:08 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:21:08 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:21:08 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:21:08 DEBUG WPS2SM:290 - Conversion to SM Type->Title:locality (usually called k): minimal number of nearest neighbors +2016-04-11 09:21:08 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_lower_bound +2016-04-11 09:21:08 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:08 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. of Entries:1; Max N. 
of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:21:08 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_upper_bound is a Literal Input +2016-04-11 09:21:08 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:21:08 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:21:08 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:21:08 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of nearest neighbors to take into account for outliers evaluation +2016-04-11 09:21:08 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_upper_bound +2016-04-11 09:21:08 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:08 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:21:08 DEBUG WPS2SM:254 - Conversion to SM Type->distance_function is a Literal Input +2016-04-11 09:21:08 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:21:08 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:21:08 DEBUG WPS2SM:101 - Guessed default value: euclidian distance +2016-04-11 09:21:08 DEBUG WPS2SM:265 - ValueType[]:[euclidian distance, squared distance, cosine distance, inverted cosine distance, angle] +2016-04-11 09:21:08 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the distance function to use in the calculation +2016-04-11 09:21:08 DEBUG WPS2SM:291 - Conversion to SM Type->Name:distance_function +2016-04-11 09:21:08 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:08 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. 
of Entries:1; default:euclidian distance], typology=ENUM] +2016-04-11 09:21:08 DEBUG WPS2SM:254 - Conversion to SM Type->lof_threshold is a Literal Input +2016-04-11 09:21:08 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:21:08 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:21:08 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:21:08 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the LOF score threshold over which the point is an outlier (usually 2) +2016-04-11 09:21:08 DEBUG WPS2SM:291 - Conversion to SM Type->Name:lof_threshold +2016-04-11 09:21:08 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:08 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:21:08 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. 
of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. of Entries:1; default:euclidian distance], typology=ENUM], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:21:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:21:09 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:21:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:21:09 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:21:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:21:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:21:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:21:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:21:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:21:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:21:09 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:21:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:21:09 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:21:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:21:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:21:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:21:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:21:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:21:09 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:21:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:21:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:21:09 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:21:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:21:09 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:21:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:21:09 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:21:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:21:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:21:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:21:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:21:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:21:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:21:09 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:21:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:21:09 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:21:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:21:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:21:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:21:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:21:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:21:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:21:09 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:21:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:09 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:21:09 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:21:09 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:21:09 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:21:09 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 
09:21:09 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:21:09 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:21:09 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:21:09 INFO WorkspaceExplorerServiceImpl:142 - end time - 173 msc 0 sec +2016-04-11 09:21:09 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:21:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:21:12 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:21:12 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:21:12 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS +2016-04-11 09:21:12 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:21:12 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:21:13 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS + XMEANS + A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + + + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + + + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:21:13 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:21:13 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:21:13 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:21:13 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:21:13 DEBUG SClient4WPS:290 - WPSClient->Input: + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. 
XMeans max number of overall iterations of the clustering learning + + + + 10 + + +2016-04-11 09:21:13 DEBUG SClient4WPS:290 - WPSClient->Input: + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + +2016-04-11 09:21:13 DEBUG SClient4WPS:290 - WPSClient->Input: + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + +2016-04-11 09:21:13 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-11 09:21:13 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:21:13 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:21:13 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:21:13 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:21:13 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:21:13 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:21:13 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:21:13 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:21:13 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:21:13 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:21:13 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:21:13 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:13 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-11 09:21:13 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:21:13 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:21:13 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:21:13 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:21:13 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:21:13 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:21:13 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:21:13 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:21:13 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:21:13 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:21:13 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:21:13 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:21:13 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:21:13 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:21:13 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:13 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:21:13 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:21:13 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:21:13 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:21:13 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:21:13 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:21:13 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:21:13 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:13 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:21:13 DEBUG WPS2SM:254 - Conversion to SM Type->maxIterations is a Literal Input +2016-04-11 09:21:13 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:21:13 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:21:13 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:21:13 DEBUG WPS2SM:290 - Conversion to SM Type->Title:XMeans max number of overall iterations of the clustering learning +2016-04-11 09:21:13 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxIterations +2016-04-11 09:21:13 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:13 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:21:13 DEBUG WPS2SM:254 - Conversion to SM Type->minClusters is a Literal Input +2016-04-11 09:21:13 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:21:13 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:21:13 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:21:13 DEBUG WPS2SM:290 - Conversion to SM Type->Title:minimum number of expected clusters +2016-04-11 09:21:13 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minClusters +2016-04-11 09:21:13 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:13 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:21:13 DEBUG WPS2SM:254 - Conversion to SM Type->maxClusters is a Literal Input +2016-04-11 09:21:13 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:21:13 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:21:13 DEBUG WPS2SM:101 - Guessed default value: 50 +2016-04-11 09:21:13 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of clusters to produce +2016-04-11 09:21:13 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxClusters +2016-04-11 09:21:13 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:13 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. 
of Entries:1; default:50], typology=OBJECT] +2016-04-11 09:21:13 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:21:13 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:21:13 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:21:13 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:21:13 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-11 09:21:13 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:21:13 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:13 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:21:13 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. of Entries:1; default:50], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:21:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:21:13 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:21:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:21:13 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:21:13 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:21:13 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:21:13 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:21:13 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:21:13 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:21:13 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:21:13 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:21:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:21:13 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:21:13 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:21:13 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:21:13 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:21:13 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:21:13 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:21:13 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:21:13 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:21:13 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:21:13 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:21:13 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:13 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:13 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:21:13 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:21:13 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:21:13 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:21:13 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:21:13 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:21:13 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:21:13 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:21:13 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:21:13 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:21:13 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:21:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:21:13 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:21:13 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:21:13 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:21:13 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:21:13 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:21:13 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:21:13 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:13 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:13 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:21:13 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:21:13 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:13 INFO JCRWorkspace:315 - Getting Workspace 
of user: giancarlo.panichi +2016-04-11 09:21:13 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:13 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:13 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:21:13 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:21:13 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:21:13 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:21:13 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:21:13 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:21:13 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:21:13 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:21:13 INFO WorkspaceExplorerServiceImpl:142 - end time - 179 msc 0 sec +2016-04-11 09:21:13 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:21:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:21:16 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:21:16 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:21:16 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-11 09:21:16 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:21:16 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:21:17 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies 
on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:21:17 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:21:17 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:21:17 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:21:17 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:21:17 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-11 09:21:17 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-11 09:21:17 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:21:17 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:21:17 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:21:17 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:21:17 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:21:17 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:21:17 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:21:17 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:21:17 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:21:17 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:21:17 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:21:17 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:17 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:21:17 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:21:17 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:21:17 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:21:17 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:21:17 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:21:17 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:21:17 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:21:17 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:21:17 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:21:17 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:21:17 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:21:17 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:21:17 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:21:17 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:21:17 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:17 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:21:17 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:21:17 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:21:17 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:21:17 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:21:17 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:21:17 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:21:17 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:17 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:21:17 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-11 09:21:17 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:21:17 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:21:17 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:21:17 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-11 09:21:17 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-11 09:21:17 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:17 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:21:17 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:21:17 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:21:17 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:21:17 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:21:17 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-11 09:21:17 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:21:17 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:21:17 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:21:17 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-11 09:21:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:21:17 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:21:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:21:17 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:21:17 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:21:17 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:21:17 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:21:17 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:21:17 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:21:17 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:21:17 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:21:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:21:17 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:21:17 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:21:17 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:21:17 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:21:17 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:21:17 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:21:17 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:21:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:21:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:21:17 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:21:17 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:17 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:21:17 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:21:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:21:17 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:21:17 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:21:17 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:21:17 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:21:17 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:21:17 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:21:17 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:21:17 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:21:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:21:17 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:21:17 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:21:17 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:21:17 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:21:17 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:21:17 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:21:17 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:17 INFO JCRWorkspace:315 - Getting Workspace of user: 
giancarlo.panichi +2016-04-11 09:21:17 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:21:17 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:21:17 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:21:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:17 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:21:17 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:21:17 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:21:17 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:21:17 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:21:17 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:21:17 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:21:17 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:21:17 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:21:17 INFO WorkspaceExplorerServiceImpl:142 - end time - 186 msc 0 sec +2016-04-11 09:21:17 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:21:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:21:46 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:22:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:22:41 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:25:49 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-11 09:25:49 DEBUG AccessLogger:44 - Constructing a new access logger. 
Create a new file if it does not exist for the current date +2016-04-11 09:25:49 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-11 09:25:49 INFO SessionUtil:49 - no user found in session, use test user +2016-04-11 09:25:49 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-11 09:25:49 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:25:49 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-11 09:25:49 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@5c117d49 +2016-04-11 09:25:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:25:49 INFO ASLSession:352 - Logging the entrance +2016-04-11 09:25:49 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-11 09:25:49 DEBUG TemplateModel:83 - 2016-04-11 09:25:49, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-11 09:25:49 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:25:49 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-11 09:25:53 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-11 09:25:53 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-11 09:25:53 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-11 09:25:53 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:25:53 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:25:53 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:25:53 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:25:53 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 134 ms +2016-04-11 09:25:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-11 09:25:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-11 09:25:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-11 09:25:53 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-11 09:25:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-11 09:25:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-11 09:25:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-11 09:25:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-11 09:25:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-11 09:25:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-11 09:25:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-11 09:25:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-11 09:25:53 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-11 09:25:53 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-11 09:25:53 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-11 09:25:54 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:25:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:25:54 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@1f154fc1 +2016-04-11 09:25:54 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@1a59a58c +2016-04-11 09:25:54 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@dde16c3 +2016-04-11 09:25:54 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@7bbb3c2a +2016-04-11 09:25:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 197 ms +2016-04-11 09:25:54 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-11 09:25:54 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-11 09:25:54 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-11 09:25:54 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-11 09:25:54 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-11 09:25:54 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:25:54 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:25:54 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-11 09:25:54 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 25 ms +2016-04-11 09:25:54 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-11 09:25:54 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:25:54 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-11 09:25:54 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:25:55 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:25:55 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-11 09:25:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:25:58 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:25:58 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:25:58 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-11 09:25:58 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:25:58 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:25:59 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:25:59 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:25:59 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:25:59 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:25:59 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:25:59 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-11 09:25:59 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-11 09:25:59 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:25:59 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:25:59 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:25:59 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:25:59 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:25:59 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:25:59 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:25:59 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:25:59 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:25:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:25:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:25:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:25:59 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:25:59 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:25:59 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:25:59 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:25:59 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:25:59 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:25:59 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:25:59 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:25:59 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:25:59 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:25:59 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:25:59 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:25:59 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:25:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:25:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:25:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:25:59 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:25:59 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:25:59 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:25:59 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:25:59 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:25:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:25:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:25:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:25:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:25:59 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-11 09:25:59 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:25:59 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:25:59 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:25:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-11 09:25:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-11 09:25:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:25:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:25:59 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:25:59 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:25:59 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:25:59 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:25:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-11 09:25:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:25:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:25:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:25:59 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-11 09:25:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:25:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:25:59 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:25:59 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:25:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:25:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:25:59 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:25:59 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:25:59 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:25:59 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:25:59 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:25:59 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:25:59 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:25:59 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-11 09:25:59 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:25:59 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:25:59 DEBUG JCRRepository:271 - Initialize repository +2016-04-11 09:25:59 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-11 09:25:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-11 09:25:59 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:25:59 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-11 09:25:59 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 20 ms +2016-04-11 09:25:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:25:59 DEBUG ASLSession:458 - Getting security token: null in thread 36 
+2016-04-11 09:25:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:25:59 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:25:59 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:25:59 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:25:59 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:25:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:25:59 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:25:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:25:59 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:25:59 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:25:59 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:25:59 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:26:00 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-11 09:26:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:26:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:26:00 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:26:00 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:26:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:26:00 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:26:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:26:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:26:00 INFO 
JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:26:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:26:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:26:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:26:00 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-11 09:26:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:26:00 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-11 09:26:00 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:26:00 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:26:00 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-11 09:26:01 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:26:01 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:26:01 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-11 09:26:01 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-11 09:26:01 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:26:01 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:26:01 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:26:01 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:26:01 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:26:01 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:26:01 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:26:01 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:26:01 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:26:01 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:26:01 INFO JCRWorkspace:315 - Getting Workspace of user: 
giancarlo.panichi +2016-04-11 09:26:01 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:26:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:26:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:26:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:26:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:26:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:26:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:26:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:26:02 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:26:02 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:26:02 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:26:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:26:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:26:03 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:26:03 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:26:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:26:03 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:26:03 INFO JCRServlets:142 - Calling servlet 
getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:26:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:26:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:26:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:26:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:26:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 21 ms +2016-04-11 09:26:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:26:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:26:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-11 09:26:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 
'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 23 ms +2016-04-11 09:26:03 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-11 09:26:03 INFO ISClientConnector:82 - found only one RR, take it +2016-04-11 09:26:03 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-11 09:26:03 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-11 09:26:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:26:03 DEBUG StorageClient:517 - set scope: /gcube +2016-04-11 09:26:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:26:03 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:26:03 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:26:03 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 18 ms +2016-04-11 09:26:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:26:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:26:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:26:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:26:03 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:26:03 DEBUG ItemBuilder:108 - Is 
shared folder: Cotrix test +2016-04-11 09:26:03 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:26:03 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:26:03 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:26:03 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:26:03 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:26:03 INFO WorkspaceExplorerServiceImpl:142 - end time - 384 msc 0 sec +2016-04-11 09:26:03 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:26:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:26:27 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:26:27 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:26:27 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF +2016-04-11 09:26:27 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:26:27 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:26:28 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF + LOF + Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed. + + + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + + + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + + + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + + + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + + + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + + + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:26:28 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:26:28 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:26:28 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + +2016-04-11 09:26:28 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + +2016-04-11 09:26:28 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. 
locality (usually called k): minimal number of nearest neighbors + + + + 2 + + +2016-04-11 09:26:28 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + +2016-04-11 09:26:28 DEBUG SClient4WPS:290 - WPSClient->Input: + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + +2016-04-11 09:26:28 DEBUG SClient4WPS:290 - WPSClient->Input: + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + +2016-04-11 09:26:28 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:26:28 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:26:28 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:26:28 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:26:28 DEBUG WPS2SM:279 - Conversion to SM Type->PointsTable is a Complex Input +2016-04-11 09:26:28 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:26:28 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:26:28 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:26:28 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:26:28 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:26:28 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsTable +2016-04-11 09:26:28 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:26:28 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-11 09:26:28 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:26:28 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:26:28 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:26:28 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:26:28 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:26:28 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:26:28 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from PointsTable separated by | +2016-04-11 09:26:28 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:26:28 DEBUG WPS2SM:115 - Machter end: 93 +2016-04-11 09:26:28 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:26:28 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: PointsTable +2016-04-11 09:26:28 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:26:28 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from PointsTable separated by | ] +2016-04-11 09:26:28 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:26:28 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:26:28 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:26:28 DEBUG WPS2SM:254 - Conversion to SM Type->PointsClusterLabel is a Literal Input +2016-04-11 09:26:28 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:26:28 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:26:28 DEBUG WPS2SM:101 - Guessed default value: Cluster_ +2016-04-11 09:26:28 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:26:28 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsClusterLabel +2016-04-11 09:26:28 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:26:28 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT] +2016-04-11 09:26:28 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_lower_bound is a Literal Input +2016-04-11 09:26:28 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:26:28 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:26:28 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:26:28 DEBUG WPS2SM:290 - Conversion to SM Type->Title:locality (usually called k): minimal number of nearest neighbors +2016-04-11 09:26:28 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_lower_bound +2016-04-11 09:26:28 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:26:28 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. of Entries:1; Max N. 
of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:26:28 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_upper_bound is a Literal Input +2016-04-11 09:26:28 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:26:28 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:26:28 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:26:28 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of nearest neighbors to take into account for outliers evaluation +2016-04-11 09:26:28 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_upper_bound +2016-04-11 09:26:28 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:26:28 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:26:28 DEBUG WPS2SM:254 - Conversion to SM Type->distance_function is a Literal Input +2016-04-11 09:26:28 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:26:28 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:26:28 DEBUG WPS2SM:101 - Guessed default value: euclidian distance +2016-04-11 09:26:28 DEBUG WPS2SM:265 - ValueType[]:[euclidian distance, squared distance, cosine distance, inverted cosine distance, angle] +2016-04-11 09:26:28 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the distance function to use in the calculation +2016-04-11 09:26:28 DEBUG WPS2SM:291 - Conversion to SM Type->Name:distance_function +2016-04-11 09:26:28 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:26:28 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. 
of Entries:1; default:euclidian distance], typology=ENUM] +2016-04-11 09:26:28 DEBUG WPS2SM:254 - Conversion to SM Type->lof_threshold is a Literal Input +2016-04-11 09:26:28 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:26:28 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:26:28 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:26:28 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the LOF score threshold over which the point is an outlier (usually 2) +2016-04-11 09:26:28 DEBUG WPS2SM:291 - Conversion to SM Type->Name:lof_threshold +2016-04-11 09:26:28 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:26:28 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:26:28 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. 
of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. of Entries:1; default:euclidian distance], typology=ENUM], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:26:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:26:28 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:26:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:26:28 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:26:28 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:26:28 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:26:28 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:26:28 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:26:28 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:26:28 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:26:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:26:28 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:26:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:26:28 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:26:28 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:26:28 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:26:28 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:26:28 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:26:28 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:26:28 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:26:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:26:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:26:28 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:26:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:26:28 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:26:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:26:28 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:26:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:26:28 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:26:28 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:26:28 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:26:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:26:28 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:26:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:26:28 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:26:28 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:26:28 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:26:28 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:26:28 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:26:28 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:26:28 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:26:28 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:26:28 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:26:28 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:26:28 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:26:28 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:26:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:26:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:26:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:26:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by 
giancarlo.panichi +2016-04-11 09:26:28 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:26:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:26:28 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:26:28 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:26:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:26:28 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:26:28 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:26:28 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:26:28 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:26:28 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:26:28 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:26:28 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:26:28 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:26:28 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:26:28 INFO WorkspaceExplorerServiceImpl:142 - end time - 201 msc 0 sec +2016-04-11 09:26:28 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:26:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:26:34 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:26:34 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:26:34 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-11 09:26:34 DEBUG 
SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:26:34 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:26:36 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. 
max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:26:36 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:26:36 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:26:36 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. 
column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:26:36 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:26:36 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-11 09:26:36 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + +2016-04-11 09:26:36 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-11 09:26:36 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-11 09:26:36 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:26:36 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:26:36 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:26:36 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:26:36 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:26:36 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:26:36 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:26:36 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:26:36 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:26:36 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:26:36 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:26:36 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:26:36 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-11 09:26:36 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:26:36 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:26:36 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:26:36 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:26:36 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:26:36 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:26:36 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:26:36 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:26:36 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:26:36 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:26:36 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:26:36 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:26:36 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:26:36 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:26:36 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:26:36 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:26:36 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:26:36 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:26:36 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:26:36 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:26:36 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:26:36 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:26:36 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:26:36 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:26:36 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-11 09:26:36 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:26:36 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:26:36 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-11 09:26:36 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-11 09:26:36 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-11 09:26:36 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:26:36 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-11 09:26:36 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-11 09:26:36 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:26:36 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:26:36 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:26:36 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-11 09:26:36 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-11 09:26:36 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:26:36 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:26:36 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-11 09:26:36 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:26:36 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:26:36 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-11 09:26:36 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-11 09:26:36 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-11 09:26:36 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:26:36 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-11 09:26:36 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:26:36 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:26:36 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:26:36 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:26:36 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-11 09:26:36 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:26:36 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:26:36 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:26:36 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:26:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:26:36 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:26:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:26:36 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:26:36 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:26:36 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:26:36 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:26:36 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:26:36 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:26:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:26:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:26:36 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:26:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:26:36 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:26:36 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:26:36 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:26:36 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:26:36 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:26:36 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:26:36 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:26:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:26:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:26:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:26:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:26:36 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:26:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:26:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:26:36 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:26:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:26:36 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:26:36 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:26:36 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:26:36 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:26:36 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:26:36 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:26:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:26:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:26:36 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:26:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:26:36 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:26:36 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:26:36 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:26:36 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:26:36 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:26:36 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:26:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:26:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:26:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:26:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:26:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:26:36 DEBUG DefaultScopeProvider:38 - setting scope 
/gcube/devsec/devVRE in thread 35 +2016-04-11 09:26:36 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:26:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:26:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:26:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:26:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:26:36 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:26:36 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:26:36 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:26:36 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:26:36 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:26:36 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:26:36 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:26:36 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:26:36 INFO WorkspaceExplorerServiceImpl:142 - end time - 189 msc 0 sec +2016-04-11 09:26:36 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:26:44 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:26:44 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:27:39 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:27:39 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:28:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:28:34 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 
09:29:29 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:29:29 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:30:47 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-11 09:30:47 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-11 09:30:47 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-11 09:30:47 INFO SessionUtil:49 - no user found in session, use test user +2016-04-11 09:30:47 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-11 09:30:47 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:30:47 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-11 09:30:47 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@6731ebfa +2016-04-11 09:30:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:30:47 INFO ASLSession:352 - Logging the entrance +2016-04-11 09:30:47 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-11 09:30:47 DEBUG TemplateModel:83 - 2016-04-11 09:30:47, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-11 09:30:47 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:30:47 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-11 09:30:50 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. 
+2016-04-11 09:30:50 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-11 09:30:50 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-11 09:30:50 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:30:50 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:30:50 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:30:51 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:30:51 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 118 ms +2016-04-11 09:30:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-11 09:30:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-11 09:30:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-11 09:30:51 INFO 
ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-11 09:30:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-11 09:30:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-11 09:30:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-11 09:30:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-11 09:30:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-11 09:30:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-11 09:30:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-11 09:30:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-11 09:30:51 INFO 
ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-11 09:30:51 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-11 09:30:51 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-11 09:30:51 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:30:51 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:30:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@f6fb63d +2016-04-11 09:30:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@38bbea7 +2016-04-11 09:30:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@74f6aa5 +2016-04-11 09:30:51 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@641b0d03 +2016-04-11 09:30:51 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 109 ms +2016-04-11 09:30:51 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-11 09:30:51 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-11 09:30:51 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-11 09:30:51 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-11 09:30:51 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-11 09:30:51 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:30:51 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:30:51 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-11 09:30:51 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 26 ms +2016-04-11 09:30:51 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-11 09:30:51 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:30:51 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-11 09:30:51 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:30:52 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:30:52 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-11 09:30:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:30:55 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:30:55 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:30:55 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-11 09:30:55 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:30:55 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:30:55 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:30:55 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:30:55 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:30:55 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:30:55 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:30:55 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-11 09:30:55 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-11 09:30:55 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:30:55 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:30:55 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:30:55 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:30:55 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:30:55 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:30:55 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:30:55 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:30:55 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:30:55 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:30:55 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:30:55 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:30:55 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:30:55 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:30:55 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:30:55 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:30:55 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:30:55 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:30:55 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:30:55 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:30:55 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:30:55 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:30:55 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:30:55 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:30:55 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:30:55 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:30:55 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:30:55 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:30:55 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:30:55 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:30:55 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:30:55 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:30:55 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:30:55 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:30:55 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:30:55 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:30:55 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:30:55 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-11 09:30:55 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:30:55 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:30:55 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:30:55 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-11 09:30:55 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-11 09:30:55 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:30:55 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:30:55 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:30:55 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:30:55 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:30:55 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:30:55 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-11 09:30:55 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:30:55 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:30:55 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:30:55 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-11 09:30:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:30:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:30:55 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:30:55 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:30:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:30:55 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:30:55 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:30:55 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:30:55 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:30:55 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:30:56 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:30:56 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:30:56 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:30:56 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:30:56 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:30:56 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-11 09:30:56 DEBUG JCRRepository:271 - Initialize repository +2016-04-11 09:30:56 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-11 09:30:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 09:30:56 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:30:56 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-11 09:30:56 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-11 09:30:56 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-11 09:30:56 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:30:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:30:56 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:30:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:30:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:30:56 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-11 09:30:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:30:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:30:56 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-11 09:30:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:30:56 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:30:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:30:56 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:30:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:30:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:30:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:30:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:30:56 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:30:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:30:56 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:30:56 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:30:56 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:30:56 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:30:56 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:30:56 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:30:56 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-11 09:30:56 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:30:56 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:30:56 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-11 09:30:56 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-11 09:30:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:30:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:30:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:30:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:30:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:30:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:30:56 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:30:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:30:56 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:30:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:30:56 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:30:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:30:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-11 09:30:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:30:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:30:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:30:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:30:56 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:30:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:30:56 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:30:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:30:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:30:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:30:56 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:30:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:30:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:30:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:30:56 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:30:56 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:30:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-11 09:30:56 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 
09:30:56 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:30:56 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:30:57 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 28 ms +2016-04-11 09:30:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-11 09:30:57 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:30:57 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-11 09:30:57 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 17 ms +2016-04-11 09:30:57 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-11 09:30:57 INFO ISClientConnector:82 - found only one RR, take it +2016-04-11 09:30:57 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-11 09:30:57 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-11 09:30:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-11 09:30:57 DEBUG StorageClient:517 - set scope: /gcube +2016-04-11 09:30:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-11 09:30:57 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:30:57 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:30:57 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 17 ms +2016-04-11 09:30:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-11 09:30:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-11 09:30:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-11 09:30:57 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:30:57 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:30:57 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:30:57 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:30:57 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-11 09:30:57 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:30:57 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:30:57 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:30:57 INFO WorkspaceExplorerServiceImpl:142 - end time - 445 msc 0 sec +2016-04-11 09:30:57 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:30:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:30:59 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:30:59 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:30:59 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-11 09:30:59 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:30:59 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:30:59 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:30:59 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:30:59 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:30:59 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:30:59 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:30:59 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-11 09:30:59 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-11 09:30:59 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-11 09:30:59 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-11 09:30:59 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:30:59 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:30:59 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:30:59 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:30:59 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:30:59 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:30:59 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:30:59 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:30:59 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:30:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:30:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:30:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:30:59 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:30:59 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:30:59 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:30:59 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:30:59 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:30:59 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:30:59 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:30:59 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:30:59 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:30:59 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:30:59 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:30:59 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:30:59 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:30:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:30:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:30:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:30:59 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:30:59 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:30:59 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:30:59 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:30:59 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:30:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:30:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:30:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:30:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:30:59 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-11 09:30:59 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:30:59 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:30:59 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-11 09:30:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-11 09:30:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-11 09:30:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:30:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-11 09:30:59 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-11 09:30:59 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:30:59 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:30:59 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:30:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-11 09:30:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-11 09:30:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:30:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:30:59 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-11 09:30:59 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:30:59 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:30:59 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-11 09:30:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-11 09:30:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-11 09:30:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:30:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-11 09:30:59 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:30:59 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:30:59 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:30:59 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:30:59 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-11 09:30:59 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:30:59 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:30:59 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:30:59 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:30:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:30:59 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:30:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:30:59 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:30:59 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:30:59 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:30:59 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:30:59 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:30:59 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:30:59 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:30:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:31:00 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:31:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:31:00 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:31:00 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:31:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:31:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:31:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:31:00 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:31:00 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:31:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:31:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:31:00 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:31:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:31:00 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:31:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:31:00 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:31:00 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:31:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:31:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:31:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:31:00 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:31:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:31:00 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:31:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:31:00 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:31:00 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:31:00 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:31:00 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:31:00 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:31:00 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:31:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:00 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:00 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:31:00 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:31:00 INFO JCRWorkspace:315 - Getting Workspace 
of user: giancarlo.panichi +2016-04-11 09:31:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:00 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:00 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:31:00 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:31:00 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:31:00 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:31:00 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:31:00 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:31:00 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:31:00 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:31:00 INFO WorkspaceExplorerServiceImpl:142 - end time - 203 msc 0 sec +2016-04-11 09:31:00 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:31:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:31:06 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:31:06 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:31:06 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF +2016-04-11 09:31:06 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:31:06 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:31:06 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF + LOF + Local Outlier Factor (LOF). 
A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed. + + + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + + + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + + + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + + + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + + + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. 
the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + + + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:31:06 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:31:06 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:31:06 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. 
column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + +2016-04-11 09:31:06 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + +2016-04-11 09:31:06 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + +2016-04-11 09:31:06 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + +2016-04-11 09:31:06 DEBUG SClient4WPS:290 - WPSClient->Input: + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + +2016-04-11 09:31:06 DEBUG SClient4WPS:290 - WPSClient->Input: + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + +2016-04-11 09:31:06 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:31:06 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:31:06 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:31:06 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:31:06 DEBUG WPS2SM:279 - Conversion to SM Type->PointsTable is a Complex Input +2016-04-11 09:31:06 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:31:06 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:31:06 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:31:06 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:31:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:31:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsTable +2016-04-11 09:31:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:31:06 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-11 09:31:06 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:31:06 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:31:06 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:31:06 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:31:06 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:31:06 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:31:06 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from PointsTable separated by | +2016-04-11 09:31:06 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:31:06 DEBUG WPS2SM:115 - Machter end: 93 +2016-04-11 09:31:06 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:31:06 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: PointsTable +2016-04-11 09:31:06 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:31:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from PointsTable separated by | ] +2016-04-11 09:31:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:31:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:31:06 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:31:06 DEBUG WPS2SM:254 - Conversion to SM Type->PointsClusterLabel is a Literal Input +2016-04-11 09:31:06 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:31:06 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:31:06 DEBUG WPS2SM:101 - Guessed default value: Cluster_ +2016-04-11 09:31:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:31:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsClusterLabel +2016-04-11 09:31:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:31:06 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT] +2016-04-11 09:31:06 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_lower_bound is a Literal Input +2016-04-11 09:31:06 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:31:06 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:31:06 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:31:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:locality (usually called k): minimal number of nearest neighbors +2016-04-11 09:31:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_lower_bound +2016-04-11 09:31:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:31:06 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. of Entries:1; Max N. 
of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:31:06 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_upper_bound is a Literal Input +2016-04-11 09:31:06 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:31:06 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:31:06 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:31:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of nearest neighbors to take into account for outliers evaluation +2016-04-11 09:31:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_upper_bound +2016-04-11 09:31:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:31:06 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:31:06 DEBUG WPS2SM:254 - Conversion to SM Type->distance_function is a Literal Input +2016-04-11 09:31:06 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:31:06 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:31:06 DEBUG WPS2SM:101 - Guessed default value: euclidian distance +2016-04-11 09:31:06 DEBUG WPS2SM:265 - ValueType[]:[euclidian distance, squared distance, cosine distance, inverted cosine distance, angle] +2016-04-11 09:31:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the distance function to use in the calculation +2016-04-11 09:31:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:distance_function +2016-04-11 09:31:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:31:06 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. 
of Entries:1; default:euclidian distance], typology=ENUM] +2016-04-11 09:31:06 DEBUG WPS2SM:254 - Conversion to SM Type->lof_threshold is a Literal Input +2016-04-11 09:31:06 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:31:06 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:31:06 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:31:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the LOF score threshold over which the point is an outlier (usually 2) +2016-04-11 09:31:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:lof_threshold +2016-04-11 09:31:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:31:06 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:31:06 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. 
of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. of Entries:1; default:euclidian distance], typology=ENUM], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:31:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:31:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:31:06 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:31:06 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:31:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:31:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:31:06 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:31:06 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:31:06 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:31:06 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:31:06 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:31:06 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:31:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:06 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:31:06 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:31:06 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:31:06 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:31:06 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:31:06 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:31:06 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:31:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:31:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:31:06 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:31:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:31:06 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:31:06 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:31:06 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:31:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:31:06 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:31:06 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:31:06 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:31:06 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:31:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:31:06 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:31:06 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:31:06 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:31:06 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:31:06 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:31:06 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:31:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:31:06 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:31:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath 
/Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:31:06 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:31:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:06 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:06 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:06 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:31:06 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:31:06 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:31:06 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:31:06 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:31:06 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:31:06 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:31:06 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:31:06 INFO WorkspaceExplorerServiceImpl:142 - end time - 181 msc 0 sec +2016-04-11 09:31:06 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:31:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:31:09 DEBUG ASLSession:458 - Getting security token: null in thread 33 
+2016-04-11 09:31:09 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:31:09 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS +2016-04-11 09:31:09 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:31:09 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:31:09 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS + XMEANS + A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. 
XMeans max number of overall iterations of the clustering learning + + + + 10 + + + + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + + + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:31:09 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:31:09 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:31:09 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:31:09 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:31:09 DEBUG SClient4WPS:290 - WPSClient->Input: + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + +2016-04-11 09:31:09 DEBUG SClient4WPS:290 - WPSClient->Input: + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + +2016-04-11 09:31:09 DEBUG SClient4WPS:290 - WPSClient->Input: + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + +2016-04-11 09:31:09 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. 
number of points which define an outlier set + + + + 2 + + +2016-04-11 09:31:09 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:31:09 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:31:09 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:31:09 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:31:09 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:31:09 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:31:09 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:31:09 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:31:09 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:31:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:31:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:31:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:31:09 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:31:09 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:31:09 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:31:09 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:31:09 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:31:09 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:31:09 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:31:09 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:31:09 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:31:09 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:31:09 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:31:09 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:31:09 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:31:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:31:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:31:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:31:09 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:31:09 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:31:09 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:31:09 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:31:09 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:31:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:31:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:31:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:31:09 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:31:09 DEBUG WPS2SM:254 - Conversion to SM Type->maxIterations is a Literal Input +2016-04-11 09:31:09 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:31:09 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:31:09 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:31:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:XMeans max number of overall iterations of the clustering learning +2016-04-11 09:31:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxIterations +2016-04-11 09:31:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:31:09 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:31:09 DEBUG WPS2SM:254 - Conversion to SM Type->minClusters is a Literal Input +2016-04-11 09:31:09 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:31:09 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:31:09 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:31:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:minimum number of expected clusters +2016-04-11 09:31:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minClusters +2016-04-11 09:31:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:31:09 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:31:09 DEBUG WPS2SM:254 - Conversion to SM Type->maxClusters is a Literal Input +2016-04-11 09:31:09 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:31:09 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:31:09 DEBUG WPS2SM:101 - Guessed default value: 50 +2016-04-11 09:31:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of clusters to produce +2016-04-11 09:31:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxClusters +2016-04-11 09:31:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:31:09 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. 
of Entries:1; default:50], typology=OBJECT] +2016-04-11 09:31:09 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:31:09 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:31:09 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:31:09 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:31:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-11 09:31:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:31:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:31:09 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:31:09 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. of Entries:1; default:50], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:31:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:31:09 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:31:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:31:09 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:31:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:31:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:31:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:31:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:31:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:31:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:31:09 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:31:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:31:09 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:31:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:31:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:31:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:31:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:31:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:31:09 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:31:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:31:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:31:09 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:31:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:31:09 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:31:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:31:09 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:31:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:31:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:31:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:31:09 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:31:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:31:09 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:31:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:31:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:31:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:31:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:31:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:31:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:31:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:31:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:31:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:31:09 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:31:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:09 INFO JCRWorkspace:315 - Getting Workspace 
of user: giancarlo.panichi +2016-04-11 09:31:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:09 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:31:09 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:31:09 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:31:09 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:31:09 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:31:09 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:31:09 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:31:09 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:31:09 INFO WorkspaceExplorerServiceImpl:142 - end time - 165 msc 0 sec +2016-04-11 09:31:09 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:31:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:31:11 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:31:11 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:31:11 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF +2016-04-11 09:31:11 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:31:11 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:31:12 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF + LOF + Local Outlier Factor (LOF). 
A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed. + + + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + + + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + + + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + + + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + + + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. 
the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + + + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:31:12 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:31:12 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:31:12 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. 
column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + +2016-04-11 09:31:12 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + +2016-04-11 09:31:12 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + +2016-04-11 09:31:12 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + +2016-04-11 09:31:12 DEBUG SClient4WPS:290 - WPSClient->Input: + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + +2016-04-11 09:31:12 DEBUG SClient4WPS:290 - WPSClient->Input: + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + +2016-04-11 09:31:12 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:31:12 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:31:12 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:31:12 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:31:12 DEBUG WPS2SM:279 - Conversion to SM Type->PointsTable is a Complex Input +2016-04-11 09:31:12 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:31:12 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:31:12 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:31:12 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:31:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:31:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsTable +2016-04-11 09:31:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:31:12 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-11 09:31:12 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:31:12 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:31:12 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:31:12 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:31:12 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:31:12 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:31:12 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from PointsTable separated by | +2016-04-11 09:31:12 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:31:12 DEBUG WPS2SM:115 - Machter end: 93 +2016-04-11 09:31:12 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:31:12 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: PointsTable +2016-04-11 09:31:12 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:31:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from PointsTable separated by | ] +2016-04-11 09:31:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:31:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:31:12 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:31:12 DEBUG WPS2SM:254 - Conversion to SM Type->PointsClusterLabel is a Literal Input +2016-04-11 09:31:12 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:31:12 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:31:12 DEBUG WPS2SM:101 - Guessed default value: Cluster_ +2016-04-11 09:31:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:31:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsClusterLabel +2016-04-11 09:31:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:31:12 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT] +2016-04-11 09:31:12 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_lower_bound is a Literal Input +2016-04-11 09:31:12 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:31:12 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:31:12 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:31:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:locality (usually called k): minimal number of nearest neighbors +2016-04-11 09:31:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_lower_bound +2016-04-11 09:31:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:31:12 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. of Entries:1; Max N. 
of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:31:12 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_upper_bound is a Literal Input +2016-04-11 09:31:12 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:31:12 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:31:12 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:31:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of nearest neighbors to take into account for outliers evaluation +2016-04-11 09:31:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_upper_bound +2016-04-11 09:31:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:31:12 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:31:12 DEBUG WPS2SM:254 - Conversion to SM Type->distance_function is a Literal Input +2016-04-11 09:31:12 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:31:12 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:31:12 DEBUG WPS2SM:101 - Guessed default value: euclidian distance +2016-04-11 09:31:12 DEBUG WPS2SM:265 - ValueType[]:[euclidian distance, squared distance, cosine distance, inverted cosine distance, angle] +2016-04-11 09:31:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the distance function to use in the calculation +2016-04-11 09:31:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:distance_function +2016-04-11 09:31:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:31:12 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. 
of Entries:1; default:euclidian distance], typology=ENUM] +2016-04-11 09:31:12 DEBUG WPS2SM:254 - Conversion to SM Type->lof_threshold is a Literal Input +2016-04-11 09:31:12 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:31:12 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:31:12 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:31:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the LOF score threshold over which the point is an outlier (usually 2) +2016-04-11 09:31:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:lof_threshold +2016-04-11 09:31:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:31:12 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:31:12 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. 
of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. of Entries:1; default:euclidian distance], typology=ENUM], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:31:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:31:12 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:31:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:31:12 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:31:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:31:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:31:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:31:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:31:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:31:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:31:12 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:31:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:31:12 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:31:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:31:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:31:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:31:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:31:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:31:12 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:31:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:31:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:31:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:12 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:31:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:31:12 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:31:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:31:12 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:31:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:31:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:31:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:31:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:31:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:31:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:31:12 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:31:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:31:12 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:31:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:31:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:31:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:31:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:31:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:31:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:31:12 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:31:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:31:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:31:12 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:31:12 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:31:12 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:31:12 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:31:12 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 
09:31:12 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:31:12 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:31:12 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:31:12 INFO WorkspaceExplorerServiceImpl:142 - end time - 205 msc 0 sec +2016-04-11 09:31:12 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:31:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:31:42 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:32:21 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-11 09:32:21 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-11 09:32:21 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-11 09:32:21 INFO SessionUtil:49 - no user found in session, use test user +2016-04-11 09:32:21 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-11 09:32:21 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:32:21 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-11 09:32:21 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@c9c577e +2016-04-11 09:32:21 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:32:21 INFO ASLSession:352 - Logging the entrance +2016-04-11 09:32:21 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-11 09:32:21 DEBUG TemplateModel:83 - 2016-04-11 09:32:21, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-11 09:32:21 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:32:21 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-11 09:32:25 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-11 09:32:25 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-11 09:32:25 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-11 09:32:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:32:25 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:32:25 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:32:25 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:32:25 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 127 ms +2016-04-11 
09:32:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-11 09:32:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-11 09:32:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-11 09:32:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-11 09:32:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-11 09:32:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-11 09:32:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-11 09:32:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-11 09:32:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-11 09:32:25 
INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-11 09:32:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-11 09:32:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-11 09:32:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-11 09:32:25 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-11 09:32:25 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-11 09:32:30 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:32:30 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() 
+2016-04-11 09:32:30 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@3e72ce86 +2016-04-11 09:32:30 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@65f8f780 +2016-04-11 09:32:30 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@7d109203 +2016-04-11 09:32:30 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@28ea1197 +2016-04-11 09:32:30 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 94 ms +2016-04-11 09:32:30 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-11 09:32:31 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-11 09:32:31 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-11 09:32:31 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-11 09:32:31 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-11 09:32:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:32:31 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:32:31 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-11 09:32:31 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 32 ms +2016-04-11 09:32:31 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-11 09:32:31 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:32:31 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-11 09:32:31 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:32:31 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:32:31 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-11 09:32:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:32:34 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:32:34 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:32:34 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-11 09:32:34 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:32:34 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:32:34 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:32:34 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:32:34 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:32:34 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:32:34 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:32:34 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-11 09:32:34 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-11 09:32:34 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:32:34 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:32:34 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:32:34 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:32:34 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:32:34 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:32:34 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:32:34 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:32:34 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:32:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:32:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:32:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:34 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:32:34 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:32:34 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:32:34 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:32:34 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:32:34 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:32:34 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:32:34 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:32:34 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:32:34 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:32:34 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:32:34 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:32:34 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:32:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:32:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:32:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:34 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:32:34 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:32:34 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:32:34 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:32:34 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:32:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:32:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:32:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:34 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:32:34 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-11 09:32:34 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:32:34 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:32:34 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:32:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-11 09:32:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-11 09:32:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:34 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:32:34 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:32:34 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:32:34 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:32:34 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:32:34 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-11 09:32:34 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:32:34 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:34 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:32:34 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-11 09:32:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:32:34 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:32:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:32:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:32:34 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:32:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:32:34 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:32:34 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:32:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:32:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:32:34 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:32:34 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:32:34 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:32:34 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:32:34 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:32:34 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-11 09:32:34 DEBUG JCRRepository:271 - Initialize repository +2016-04-11 09:32:34 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-11 09:32:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 31 +2016-04-11 09:32:34 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:32:34 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-11 09:32:34 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 18 ms +2016-04-11 09:32:34 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is 
null?false +2016-04-11 09:32:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:32:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:32:34 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:32:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:32:34 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-11 09:32:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:32:34 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:32:34 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-11 09:32:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:32:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:32:34 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:32:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:32:34 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:32:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:32:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:32:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:32:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:32:34 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:32:34 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:32:34 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:32:34 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:32:34 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:32:34 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:32:35 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:32:35 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:32:35 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-11 09:32:35 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:32:35 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:32:35 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-11 09:32:35 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-11 09:32:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:32:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:32:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:32:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:32:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:32:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:32:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:32:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:35 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:32:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:32:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-11 09:32:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:32:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:32:35 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:32:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:32:35 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:32:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:35 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:32:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-11 09:32:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 
09:32:35 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:32:35 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:32:35 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 24 ms +2016-04-11 09:32:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-11 09:32:35 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:32:35 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-11 09:32:35 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-11 09:32:35 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-11 09:32:35 INFO ISClientConnector:82 - found only one RR, take it +2016-04-11 09:32:35 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-11 09:32:35 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-11 09:32:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-11 09:32:35 DEBUG StorageClient:517 - set scope: /gcube +2016-04-11 09:32:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-11 09:32:35 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:32:35 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:32:35 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-11 09:32:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-11 09:32:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-11 09:32:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 32 +2016-04-11 09:32:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:32:35 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:32:35 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:32:35 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:32:35 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-11 09:32:35 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:32:35 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:32:35 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:32:35 INFO WorkspaceExplorerServiceImpl:142 - end time - 434 msc 0 sec +2016-04-11 09:32:35 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:32:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:32:37 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:32:37 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:32:37 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-11 09:32:37 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:32:37 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:32:37 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:32:37 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:32:37 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:32:37 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:32:37 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:32:37 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-11 09:32:37 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-11 09:32:37 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-11 09:32:37 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-11 09:32:37 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:32:37 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:32:37 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:32:37 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:32:37 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:32:37 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:32:37 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:32:37 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:32:37 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:32:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:32:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:32:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:37 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:32:37 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:32:37 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:32:37 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:32:37 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:32:37 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:32:37 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:32:37 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:32:37 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:32:37 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:32:37 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:32:37 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:32:37 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:32:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:32:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:32:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:37 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:32:37 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:32:37 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:32:37 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:32:37 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:32:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:32:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:32:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:37 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:32:37 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-11 09:32:37 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:32:37 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:32:37 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-11 09:32:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-11 09:32:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-11 09:32:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:37 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-11 09:32:37 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-11 09:32:37 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:32:37 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:32:37 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:32:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-11 09:32:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-11 09:32:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:37 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:32:37 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-11 09:32:37 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:32:37 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:32:37 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-11 09:32:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-11 09:32:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-11 09:32:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:37 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-11 09:32:37 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:32:37 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:32:37 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:32:37 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:32:37 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-11 09:32:37 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:32:37 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:37 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:32:37 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:32:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:32:37 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:32:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:32:37 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:32:37 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:32:37 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:32:37 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:32:37 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:32:37 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:32:37 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:32:37 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:32:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:32:37 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:32:37 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:32:37 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:32:37 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:32:37 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:32:37 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:32:37 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:32:37 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:37 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:32:37 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:32:37 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:32:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:32:37 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:32:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:32:37 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:32:37 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:32:37 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:32:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:32:37 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:32:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:32:37 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:32:37 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:32:37 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:32:37 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:32:37 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:32:37 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:32:37 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:37 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:32:37 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:37 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:32:37 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:32:37 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:37 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:37 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:37 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:37 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by 
giancarlo.panichi +2016-04-11 09:32:37 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:32:37 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:32:37 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:38 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:38 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:38 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:32:38 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:32:38 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:32:38 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:32:38 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:32:38 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:32:38 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:32:38 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:32:38 INFO WorkspaceExplorerServiceImpl:142 - end time - 219 msc 0 sec +2016-04-11 09:32:38 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:32:40 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:32:40 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:32:40 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:32:40 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF +2016-04-11 09:32:40 DEBUG SClient4WPS:263 - 
Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:32:40 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:32:41 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF + LOF + Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed. + + + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + + + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + + + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + + + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. 
maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + + + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + + + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:32:41 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:32:41 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:32:41 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + +2016-04-11 09:32:41 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + +2016-04-11 09:32:41 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + +2016-04-11 09:32:41 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + +2016-04-11 09:32:41 DEBUG SClient4WPS:290 - WPSClient->Input: + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + +2016-04-11 09:32:41 DEBUG SClient4WPS:290 - WPSClient->Input: + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. 
the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + +2016-04-11 09:32:41 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:32:41 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:32:41 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:32:41 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:32:41 DEBUG WPS2SM:279 - Conversion to SM Type->PointsTable is a Complex Input +2016-04-11 09:32:41 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:32:41 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:32:41 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:32:41 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:32:41 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:32:41 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsTable +2016-04-11 09:32:41 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:41 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:32:41 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:32:41 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:32:41 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:32:41 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:32:41 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:32:41 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:32:41 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from PointsTable separated by | +2016-04-11 09:32:41 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:32:41 DEBUG WPS2SM:115 - Machter end: 93 +2016-04-11 09:32:41 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:32:41 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: PointsTable +2016-04-11 09:32:41 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:32:41 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from PointsTable separated by | ] +2016-04-11 09:32:41 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:32:41 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:41 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:32:41 DEBUG WPS2SM:254 - Conversion to SM Type->PointsClusterLabel is a Literal Input +2016-04-11 09:32:41 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:32:41 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:32:41 DEBUG WPS2SM:101 - Guessed default value: Cluster_ +2016-04-11 09:32:41 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:32:41 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsClusterLabel +2016-04-11 09:32:41 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:41 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT] +2016-04-11 09:32:41 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_lower_bound is a Literal Input +2016-04-11 09:32:41 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:32:41 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:32:41 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:32:41 DEBUG WPS2SM:290 - Conversion to SM Type->Title:locality (usually called k): minimal number of nearest neighbors +2016-04-11 09:32:41 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_lower_bound +2016-04-11 09:32:41 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:41 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. of Entries:1; Max N. 
of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:32:41 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_upper_bound is a Literal Input +2016-04-11 09:32:41 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:32:41 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:32:41 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:32:41 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of nearest neighbors to take into account for outliers evaluation +2016-04-11 09:32:41 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_upper_bound +2016-04-11 09:32:41 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:41 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:32:41 DEBUG WPS2SM:254 - Conversion to SM Type->distance_function is a Literal Input +2016-04-11 09:32:41 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:32:41 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:32:41 DEBUG WPS2SM:101 - Guessed default value: euclidian distance +2016-04-11 09:32:41 DEBUG WPS2SM:265 - ValueType[]:[euclidian distance, squared distance, cosine distance, inverted cosine distance, angle] +2016-04-11 09:32:41 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the distance function to use in the calculation +2016-04-11 09:32:41 DEBUG WPS2SM:291 - Conversion to SM Type->Name:distance_function +2016-04-11 09:32:41 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:41 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. 
of Entries:1; default:euclidian distance], typology=ENUM] +2016-04-11 09:32:41 DEBUG WPS2SM:254 - Conversion to SM Type->lof_threshold is a Literal Input +2016-04-11 09:32:41 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:32:41 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:32:41 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:32:41 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the LOF score threshold over which the point is an outlier (usually 2) +2016-04-11 09:32:41 DEBUG WPS2SM:291 - Conversion to SM Type->Name:lof_threshold +2016-04-11 09:32:41 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:41 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:32:41 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. 
of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. of Entries:1; default:euclidian distance], typology=ENUM], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:32:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:32:41 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:32:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:32:41 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:32:41 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:32:41 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:32:41 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:32:41 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:32:41 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:32:41 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:32:41 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:32:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:32:41 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:32:41 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:32:41 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:32:41 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:32:41 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:32:41 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:32:41 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:32:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:32:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:32:41 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:32:41 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:32:41 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:32:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:32:41 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:32:41 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:32:41 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:32:41 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:32:41 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:32:41 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:32:41 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:41 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:32:41 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:32:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:32:41 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:32:41 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:32:41 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:32:41 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:32:41 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:32:41 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:32:41 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by 
giancarlo.panichi +2016-04-11 09:32:41 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:32:41 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:32:41 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:41 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:41 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:41 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:32:41 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:32:41 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:32:41 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:32:41 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:32:41 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:32:41 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:32:41 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:32:41 INFO WorkspaceExplorerServiceImpl:142 - end time - 215 msc 0 sec +2016-04-11 09:32:41 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:32:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:32:47 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:32:47 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:32:47 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-11 09:32:47 DEBUG 
SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:32:47 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:32:47 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:32:47 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:32:47 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:32:47 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:32:47 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:32:47 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-11 09:32:47 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-11 09:32:47 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:32:47 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:32:47 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:32:47 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:32:47 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:32:47 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:32:47 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:32:47 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:32:47 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:32:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:32:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:32:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:47 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:32:47 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:32:47 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:32:47 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:32:47 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:32:47 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:32:47 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:32:47 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:32:47 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:32:47 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:32:47 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:32:47 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:32:47 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:32:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:32:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:32:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:47 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:32:47 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:32:47 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:32:47 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:32:47 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:32:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:32:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:32:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:47 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:32:47 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-11 09:32:47 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:32:47 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:32:47 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:32:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-11 09:32:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-11 09:32:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:47 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:32:47 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:32:47 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:32:47 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:32:47 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:32:47 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-11 09:32:47 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:32:47 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:32:47 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:32:47 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-11 09:32:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:32:47 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:32:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:32:47 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:32:47 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:32:47 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:32:47 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:32:47 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:32:47 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:32:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:32:47 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:32:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:32:47 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:32:47 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:32:47 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:32:47 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:32:47 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:32:47 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:32:47 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:32:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:32:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:32:47 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:32:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:32:47 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:32:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:32:47 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:32:47 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:32:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:32:47 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:32:47 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:32:47 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:32:47 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:32:47 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:32:47 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:32:47 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:32:47 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:32:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:47 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:32:47 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:32:47 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:32:47 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:32:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:47 DEBUG DefaultScopeProvider:38 - setting scope 
/gcube/devsec/devVRE in thread 32 +2016-04-11 09:32:47 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:32:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:47 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:32:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:47 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:32:47 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:32:48 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:32:48 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:32:48 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:32:48 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:32:48 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:32:48 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:32:48 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:32:48 INFO WorkspaceExplorerServiceImpl:142 - end time - 178 msc 0 sec +2016-04-11 09:32:48 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:33:16 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:33:16 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:33:59 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-11 09:33:59 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-11 09:33:59 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-11 09:33:59 INFO SessionUtil:49 - no user found in session, use test user +2016-04-11 09:33:59 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-11 09:33:59 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:33:59 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-11 09:33:59 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@53b12918 +2016-04-11 09:33:59 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:33:59 INFO ASLSession:352 - Logging the entrance +2016-04-11 09:33:59 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-11 09:33:59 DEBUG TemplateModel:83 - 2016-04-11 09:33:59, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-11 09:33:59 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:33:59 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-11 09:34:01 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-11 09:34:01 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-11 09:34:01 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-11 09:34:01 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:34:01 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:34:01 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:34:02 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:34:02 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 153 ms +2016-04-11 09:34:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-11 09:34:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-11 09:34:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-11 09:34:02 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-11 09:34:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-11 09:34:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-11 09:34:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-11 09:34:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-11 09:34:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-11 09:34:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-11 09:34:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-11 09:34:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-11 09:34:02 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-11 09:34:02 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-11 09:34:02 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-11 09:34:02 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:34:02 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:34:02 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@4d53b829 +2016-04-11 09:34:02 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@6299f368 +2016-04-11 09:34:02 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@32e54f23 +2016-04-11 09:34:02 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@37424ff +2016-04-11 09:34:02 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 116 ms +2016-04-11 09:34:02 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-11 09:34:02 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-11 09:34:02 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-11 09:34:02 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-11 09:34:02 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-11 09:34:02 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:34:02 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:34:02 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-11 09:34:02 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 31 ms +2016-04-11 09:34:02 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-11 09:34:02 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:34:02 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-11 09:34:02 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:34:03 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:34:03 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-11 09:34:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:34:06 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:34:06 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:34:06 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-11 09:34:06 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:34:06 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:34:06 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:34:06 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:34:06 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:34:06 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:34:06 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:34:06 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-11 09:34:06 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-11 09:34:06 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:34:06 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:34:06 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:34:06 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:34:06 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:34:06 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:34:06 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:34:06 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:34:06 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:34:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:34:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:34:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:06 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:34:06 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:34:06 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:06 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:34:06 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:34:06 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:34:06 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:34:06 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:34:06 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:34:06 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:34:06 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:34:06 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:34:06 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:34:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:34:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:34:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:06 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:34:06 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:34:06 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:06 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:34:06 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:34:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:34:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:34:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:06 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:34:06 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-11 09:34:06 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:06 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:34:06 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:34:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-11 09:34:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-11 09:34:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:06 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:34:06 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:34:06 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:06 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:34:06 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:34:06 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-11 09:34:06 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:34:06 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:06 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:34:06 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-11 09:34:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:34:06 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:34:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:34:06 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:34:06 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:34:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:34:06 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:34:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:34:06 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:34:06 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:34:06 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:34:06 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:34:06 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:34:06 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:34:06 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:34:06 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-11 09:34:06 DEBUG JCRRepository:271 - Initialize repository +2016-04-11 09:34:06 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-11 09:34:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 34 +2016-04-11 09:34:06 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:34:06 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-11 09:34:06 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 17 ms +2016-04-11 09:34:06 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-11 09:34:06 DEBUG DefaultScopeProvider:38 - setting 
scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:34:06 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:34:06 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:34:06 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-11 09:34:06 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:34:06 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:34:06 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:34:06 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:34:06 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-11 09:34:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:34:06 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:34:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:34:06 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:34:06 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:34:06 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:34:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:34:06 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:34:06 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:34:06 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:34:06 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:34:06 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:34:06 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:34:06 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:34:07 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:34:07 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:34:07 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-11 09:34:07 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:34:07 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:34:07 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-11 09:34:07 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-11 09:34:07 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:34:07 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:34:07 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:34:07 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:34:07 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:34:07 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:34:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:07 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:34:07 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:34:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:07 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:34:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:07 INFO JCRServlets:238 - Calling 
Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:34:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:34:07 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:34:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:34:07 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:34:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:07 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:07 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:07 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:34:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 09:34:07 DEBUG 
DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:34:07 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:34:07 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:34:07 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 33 ms +2016-04-11 09:34:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 09:34:07 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:34:07 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-11 09:34:07 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-11 09:34:07 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-11 09:34:07 INFO ISClientConnector:82 - found only one RR, take it +2016-04-11 09:34:07 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-11 09:34:07 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-11 09:34:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 09:34:07 DEBUG StorageClient:517 - set scope: /gcube +2016-04-11 09:34:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 09:34:07 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:34:07 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:34:07 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 18 ms +2016-04-11 09:34:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 09:34:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 09:34:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 09:34:07 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:34:07 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:34:07 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:34:07 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:34:07 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-11 09:34:07 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:34:07 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:34:07 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:34:07 INFO WorkspaceExplorerServiceImpl:142 - end time - 433 msc 0 sec +2016-04-11 09:34:07 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:34:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:34:09 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:34:09 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:34:09 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-11 09:34:09 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:34:09 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:34:09 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:34:09 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:34:09 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:34:09 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:34:09 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:34:09 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-11 09:34:09 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-11 09:34:09 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-11 09:34:09 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-11 09:34:09 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:34:09 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:34:09 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:34:09 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:34:09 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:34:09 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:34:09 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:34:09 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:34:09 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:34:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:34:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:34:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:09 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:34:09 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:34:09 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:09 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:34:09 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:34:09 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:34:09 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:34:09 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:34:09 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:34:09 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:34:09 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:34:09 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:34:09 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:34:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:34:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:34:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:09 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:34:09 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:34:09 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:09 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:34:09 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:34:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:34:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:34:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:09 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:34:09 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-11 09:34:09 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:09 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:34:09 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-11 09:34:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-11 09:34:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-11 09:34:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:09 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-11 09:34:09 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-11 09:34:09 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:09 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:34:09 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:34:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-11 09:34:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-11 09:34:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:09 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:34:09 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-11 09:34:09 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:09 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:34:09 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-11 09:34:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-11 09:34:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-11 09:34:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:09 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-11 09:34:09 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:34:09 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:09 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:34:09 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:34:09 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-11 09:34:09 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:34:09 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:09 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:34:09 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:34:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:34:09 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:34:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:34:09 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:34:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:34:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:34:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:34:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:34:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:34:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:34:09 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:34:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:34:09 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:34:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:34:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:34:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:34:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:34:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:34:09 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:34:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:34:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:34:09 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:34:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:34:09 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:34:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:34:09 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:34:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:34:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:34:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:34:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:34:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:34:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:34:09 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:34:09 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:34:09 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:34:09 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:34:09 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:34:09 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:34:09 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:34:09 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:34:09 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:09 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:10 INFO JCRWorkspace:315 - Getting Workspace of user: 
giancarlo.panichi +2016-04-11 09:34:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:34:10 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:34:10 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:10 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:10 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:34:10 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:34:10 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:34:10 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:34:10 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:34:10 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:34:10 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:34:10 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:34:10 INFO WorkspaceExplorerServiceImpl:142 - end time - 228 msc 0 sec +2016-04-11 09:34:10 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:34:11 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:34:11 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:34:11 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:34:11 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF +2016-04-11 09:34:11 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService 
+2016-04-11 09:34:11 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:34:12 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF + LOF + Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed. + + + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + + + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + + + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + + + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. 
maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + + + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + + + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:34:12 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:34:12 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:34:12 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + +2016-04-11 09:34:12 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + +2016-04-11 09:34:12 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + +2016-04-11 09:34:12 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + +2016-04-11 09:34:12 DEBUG SClient4WPS:290 - WPSClient->Input: + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + +2016-04-11 09:34:12 DEBUG SClient4WPS:290 - WPSClient->Input: + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. 
the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + +2016-04-11 09:34:12 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:34:12 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:34:12 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:34:12 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:34:12 DEBUG WPS2SM:279 - Conversion to SM Type->PointsTable is a Complex Input +2016-04-11 09:34:12 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:34:12 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:34:12 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:34:12 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:34:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:34:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsTable +2016-04-11 09:34:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:12 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:34:12 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:34:12 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:12 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:34:12 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:34:12 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:34:12 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:34:12 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from PointsTable separated by | +2016-04-11 09:34:12 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:34:12 DEBUG WPS2SM:115 - Machter end: 93 +2016-04-11 09:34:12 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:34:12 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: PointsTable +2016-04-11 09:34:12 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:34:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from PointsTable separated by | ] +2016-04-11 09:34:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:34:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:12 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:34:12 DEBUG WPS2SM:254 - Conversion to SM Type->PointsClusterLabel is a Literal Input +2016-04-11 09:34:12 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:12 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:34:12 DEBUG WPS2SM:101 - Guessed default value: Cluster_ +2016-04-11 09:34:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:34:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsClusterLabel +2016-04-11 09:34:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:12 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT] +2016-04-11 09:34:12 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_lower_bound is a Literal Input +2016-04-11 09:34:12 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:12 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:34:12 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:34:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:locality (usually called k): minimal number of nearest neighbors +2016-04-11 09:34:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_lower_bound +2016-04-11 09:34:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:12 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. of Entries:1; Max N. 
of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:34:12 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_upper_bound is a Literal Input +2016-04-11 09:34:12 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:12 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:34:12 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:34:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of nearest neighbors to take into account for outliers evaluation +2016-04-11 09:34:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_upper_bound +2016-04-11 09:34:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:12 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:34:12 DEBUG WPS2SM:254 - Conversion to SM Type->distance_function is a Literal Input +2016-04-11 09:34:12 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:12 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:34:12 DEBUG WPS2SM:101 - Guessed default value: euclidian distance +2016-04-11 09:34:12 DEBUG WPS2SM:265 - ValueType[]:[euclidian distance, squared distance, cosine distance, inverted cosine distance, angle] +2016-04-11 09:34:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the distance function to use in the calculation +2016-04-11 09:34:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:distance_function +2016-04-11 09:34:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:12 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. 
of Entries:1; default:euclidian distance], typology=ENUM] +2016-04-11 09:34:12 DEBUG WPS2SM:254 - Conversion to SM Type->lof_threshold is a Literal Input +2016-04-11 09:34:12 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:12 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:34:12 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:34:12 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the LOF score threshold over which the point is an outlier (usually 2) +2016-04-11 09:34:12 DEBUG WPS2SM:291 - Conversion to SM Type->Name:lof_threshold +2016-04-11 09:34:12 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:12 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:34:12 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. 
of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. of Entries:1; default:euclidian distance], typology=ENUM], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:34:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:34:12 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:34:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:34:12 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:34:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:34:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:34:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:34:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:34:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:34:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:34:12 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:34:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:34:12 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:34:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:34:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:34:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:34:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:34:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:34:12 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:34:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:34:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:34:12 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:34:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:34:12 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:34:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:34:12 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:34:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:34:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:34:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:34:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:34:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:34:12 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:34:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:34:12 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:34:12 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:34:12 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:34:12 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:34:12 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:34:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:34:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:12 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:34:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:12 INFO JCRWorkspace:315 - Getting Workspace of user: 
giancarlo.panichi +2016-04-11 09:34:12 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:34:12 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:34:12 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:12 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:12 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:34:12 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:34:12 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:34:12 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:34:12 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:34:12 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:34:12 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:34:12 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:34:12 INFO WorkspaceExplorerServiceImpl:142 - end time - 166 msc 0 sec +2016-04-11 09:34:12 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:34:14 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:34:14 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:34:14 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:34:14 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS +2016-04-11 09:34:14 DEBUG SClient4WPS:263 - Describe Process WPS URL: 
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:34:14 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:34:15 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS + XMEANS + A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + + + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + + + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. 
maximum number of clusters to produce + + + + 50 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:34:15 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:34:15 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:34:15 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. 
column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:34:15 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:34:15 DEBUG SClient4WPS:290 - WPSClient->Input: + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + +2016-04-11 09:34:15 DEBUG SClient4WPS:290 - WPSClient->Input: + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + +2016-04-11 09:34:15 DEBUG SClient4WPS:290 - WPSClient->Input: + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + +2016-04-11 09:34:15 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-11 09:34:15 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:34:15 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:34:15 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:34:15 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:34:15 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:34:15 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:34:15 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:34:15 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:34:15 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:34:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:34:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:34:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:15 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-11 09:34:15 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:34:15 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:15 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:34:15 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:34:15 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:34:15 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:34:15 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:34:15 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:34:15 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:34:15 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:34:15 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:34:15 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:34:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:34:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:34:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:15 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:34:15 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:34:15 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:15 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:34:15 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:34:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:34:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:34:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:15 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:34:15 DEBUG WPS2SM:254 - Conversion to SM Type->maxIterations is a Literal Input +2016-04-11 09:34:15 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:15 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:34:15 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:34:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:XMeans max number of overall iterations of the clustering learning +2016-04-11 09:34:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxIterations +2016-04-11 09:34:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:15 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:34:15 DEBUG WPS2SM:254 - Conversion to SM Type->minClusters is a Literal Input +2016-04-11 09:34:15 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:15 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:34:15 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:34:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:minimum number of expected clusters +2016-04-11 09:34:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minClusters +2016-04-11 09:34:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:15 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:34:15 DEBUG WPS2SM:254 - Conversion to SM Type->maxClusters is a Literal Input +2016-04-11 09:34:15 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:15 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:34:15 DEBUG WPS2SM:101 - Guessed default value: 50 +2016-04-11 09:34:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of clusters to produce +2016-04-11 09:34:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxClusters +2016-04-11 09:34:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:15 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. 
of Entries:1; default:50], typology=OBJECT] +2016-04-11 09:34:15 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:34:15 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:15 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:34:15 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:34:15 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-11 09:34:15 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:34:15 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:15 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:34:15 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. of Entries:1; default:50], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:34:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:34:15 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:34:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:34:15 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:34:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:34:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:34:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:34:15 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:34:15 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:34:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:34:15 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:34:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:34:15 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:34:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:34:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:34:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:34:15 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:34:15 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:34:15 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:34:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:34:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:34:15 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:34:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:34:15 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:34:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:34:15 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:34:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:34:15 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:34:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:34:15 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:34:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:34:15 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:34:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:34:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:34:15 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:34:15 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:34:15 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:34:15 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:34:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:15 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:34:15 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:34:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:34:15 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:34:15 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by 
giancarlo.panichi +2016-04-11 09:34:15 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:15 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:34:15 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:34:15 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:34:15 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:34:15 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:34:15 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:34:15 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:34:15 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:34:15 INFO WorkspaceExplorerServiceImpl:142 - end time - 226 msc 0 sec +2016-04-11 09:34:15 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:34:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:34:19 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:34:19 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:34:19 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-11 09:34:19 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:34:19 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:34:20 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. 
+ + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:34:20 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:34:20 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:34:20 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:34:20 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:34:20 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-11 09:34:20 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-11 09:34:20 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:34:20 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:34:20 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:34:20 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:34:20 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:34:20 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:34:20 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:34:20 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:34:20 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:34:20 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:34:20 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:34:20 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:20 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:34:20 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:34:20 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:20 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:34:20 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:34:20 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:34:20 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:34:20 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:34:20 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:34:20 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:34:20 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:34:20 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:34:20 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:34:20 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:34:20 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:34:20 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:20 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:34:20 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:34:20 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:20 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:34:20 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:34:20 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:34:20 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:34:20 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:20 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:34:20 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-11 09:34:20 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:20 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:34:20 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:34:20 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-11 09:34:20 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-11 09:34:20 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:20 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:34:20 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:34:20 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:20 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:34:20 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:34:20 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-11 09:34:20 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:34:20 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:20 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:34:20 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-11 09:34:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:34:20 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:34:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:34:20 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:34:20 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:34:20 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:34:20 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:34:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:34:20 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:34:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:34:20 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:34:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:34:20 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:34:20 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:34:20 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:34:20 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:34:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:34:20 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:34:20 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:34:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:34:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:34:20 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:34:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:34:20 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:34:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:34:20 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:34:20 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:34:20 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:34:20 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:34:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:34:20 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:34:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:34:20 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:34:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:34:20 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:34:20 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:34:20 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:34:20 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:34:20 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:34:20 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:34:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:20 INFO JCRWorkspace:315 - Getting Workspace of user: 
giancarlo.panichi +2016-04-11 09:34:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:20 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:34:20 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:34:20 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:34:20 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:34:20 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:34:20 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:34:20 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:34:20 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:34:20 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:34:20 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:34:20 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:34:20 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:34:20 INFO WorkspaceExplorerServiceImpl:142 - end time - 191 msc 0 sec +2016-04-11 09:34:20 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:34:24 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:34:24 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:34:24 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:34:24 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER +2016-04-11 09:34:24 DEBUG SClient4WPS:263 - Describe Process WPS URL: 
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:34:24 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:34:25 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER + HCAF_FILTER + An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia) + + + Table_Label + the name of the Filtered Hcaf + Name of the parameter: Table_Label. the name of the Filtered Hcaf + + + + hcaf_filtered + + + + B_Box_Left_Lower_Lat + the left lower latitude of the bounding box (range [-90,+90]) + Name of the parameter: B_Box_Left_Lower_Lat. the left lower latitude of the bounding box (range [-90,+90]) + + + + -17.098 + + + + B_Box_Left_Lower_Long + the left lower longitude of the bounding box (range [-180,+180]) + Name of the parameter: B_Box_Left_Lower_Long. the left lower longitude of the bounding box (range [-180,+180]) + + + + 89.245 + + + + B_Box_Right_Upper_Lat + the right upper latitude of the bounding box (range [-90,+90]) + Name of the parameter: B_Box_Right_Upper_Lat. the right upper latitude of the bounding box (range [-90,+90]) + + + + 25.086 + + + + B_Box_Right_Upper_Long + the right upper longitude of the bounding box (range [-180,+180]) + Name of the parameter: B_Box_Right_Upper_Long. the right upper longitude of the bounding box (range [-180,+180]) + + + + 147.642 + + + + + + OutputTable + a HCAF table focusing on the selected Bounding Box [a http link to a table in UTF-8 ecoding following this template: (HCAF) http://goo.gl/SZG9uM] + Name of the parameter: OutputTable. 
a HCAF table focusing on the selected Bounding Box [a http link to a table in UTF-8 ecoding following this template: (HCAF) http://goo.gl/SZG9uM] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:34:25 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:34:25 DEBUG SClient4WPS:290 - WPSClient->Input: + Table_Label + the name of the Filtered Hcaf + Name of the parameter: Table_Label. the name of the Filtered Hcaf + + + + hcaf_filtered + + +2016-04-11 09:34:25 DEBUG SClient4WPS:290 - WPSClient->Input: + B_Box_Left_Lower_Lat + the left lower latitude of the bounding box (range [-90,+90]) + Name of the parameter: B_Box_Left_Lower_Lat. the left lower latitude of the bounding box (range [-90,+90]) + + + + -17.098 + + +2016-04-11 09:34:25 DEBUG SClient4WPS:290 - WPSClient->Input: + B_Box_Left_Lower_Long + the left lower longitude of the bounding box (range [-180,+180]) + Name of the parameter: B_Box_Left_Lower_Long. the left lower longitude of the bounding box (range [-180,+180]) + + + + 89.245 + + +2016-04-11 09:34:25 DEBUG SClient4WPS:290 - WPSClient->Input: + B_Box_Right_Upper_Lat + the right upper latitude of the bounding box (range [-90,+90]) + Name of the parameter: B_Box_Right_Upper_Lat. the right upper latitude of the bounding box (range [-90,+90]) + + + + 25.086 + + +2016-04-11 09:34:25 DEBUG SClient4WPS:290 - WPSClient->Input: + B_Box_Right_Upper_Long + the right upper longitude of the bounding box (range [-180,+180]) + Name of the parameter: B_Box_Right_Upper_Long. 
the right upper longitude of the bounding box (range [-180,+180]) + + + + 147.642 + + +2016-04-11 09:34:25 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:34:25 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + a HCAF table focusing on the selected Bounding Box [a http link to a table in UTF-8 ecoding following this template: (HCAF) http://goo.gl/SZG9uM] + Name of the parameter: OutputTable. a HCAF table focusing on the selected Bounding Box [a http link to a table in UTF-8 ecoding following this template: (HCAF) http://goo.gl/SZG9uM] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:34:25 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:34:25 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:34:25 DEBUG WPS2SM:254 - Conversion to SM Type->Table_Label is a Literal Input +2016-04-11 09:34:25 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:25 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:34:25 DEBUG WPS2SM:101 - Guessed default value: hcaf_filtered +2016-04-11 09:34:25 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the name of the Filtered Hcaf +2016-04-11 09:34:25 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Table_Label +2016-04-11 09:34:25 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:25 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=hcaf_filtered, value=null, name=Table_Label, description=the name of the Filtered Hcaf [Min N. of Entries:1; Max N. 
of Entries:1; default:hcaf_filtered], typology=OBJECT] +2016-04-11 09:34:25 DEBUG WPS2SM:254 - Conversion to SM Type->B_Box_Left_Lower_Lat is a Literal Input +2016-04-11 09:34:25 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:25 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-11 09:34:25 DEBUG WPS2SM:101 - Guessed default value: -17.098 +2016-04-11 09:34:25 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the left lower latitude of the bounding box (range [-90,+90]) +2016-04-11 09:34:25 DEBUG WPS2SM:291 - Conversion to SM Type->Name:B_Box_Left_Lower_Lat +2016-04-11 09:34:25 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:25 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=-17.098, value=null, name=B_Box_Left_Lower_Lat, description=the left lower latitude of the bounding box (range [-90,+90]) [Min N. of Entries:1; Max N. of Entries:1; default:-17.098], typology=OBJECT] +2016-04-11 09:34:25 DEBUG WPS2SM:254 - Conversion to SM Type->B_Box_Left_Lower_Long is a Literal Input +2016-04-11 09:34:25 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:25 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-11 09:34:25 DEBUG WPS2SM:101 - Guessed default value: 89.245 +2016-04-11 09:34:25 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the left lower longitude of the bounding box (range [-180,+180]) +2016-04-11 09:34:25 DEBUG WPS2SM:291 - Conversion to SM Type->Name:B_Box_Left_Lower_Long +2016-04-11 09:34:25 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:25 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=89.245, value=null, name=B_Box_Left_Lower_Long, description=the left lower longitude of the bounding box (range [-180,+180]) [Min N. of Entries:1; Max N. 
of Entries:1; default:89.245], typology=OBJECT] +2016-04-11 09:34:25 DEBUG WPS2SM:254 - Conversion to SM Type->B_Box_Right_Upper_Lat is a Literal Input +2016-04-11 09:34:25 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:25 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-11 09:34:25 DEBUG WPS2SM:101 - Guessed default value: 25.086 +2016-04-11 09:34:25 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the right upper latitude of the bounding box (range [-90,+90]) +2016-04-11 09:34:25 DEBUG WPS2SM:291 - Conversion to SM Type->Name:B_Box_Right_Upper_Lat +2016-04-11 09:34:25 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:25 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=25.086, value=null, name=B_Box_Right_Upper_Lat, description=the right upper latitude of the bounding box (range [-90,+90]) [Min N. of Entries:1; Max N. of Entries:1; default:25.086], typology=OBJECT] +2016-04-11 09:34:25 DEBUG WPS2SM:254 - Conversion to SM Type->B_Box_Right_Upper_Long is a Literal Input +2016-04-11 09:34:25 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:34:25 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-11 09:34:25 DEBUG WPS2SM:101 - Guessed default value: 147.642 +2016-04-11 09:34:25 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the right upper longitude of the bounding box (range [-180,+180]) +2016-04-11 09:34:25 DEBUG WPS2SM:291 - Conversion to SM Type->Name:B_Box_Right_Upper_Long +2016-04-11 09:34:25 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:34:25 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=147.642, value=null, name=B_Box_Right_Upper_Long, description=the right upper longitude of the bounding box (range [-180,+180]) [Min N. of Entries:1; Max N. 
of Entries:1; default:147.642], typology=OBJECT] +2016-04-11 09:34:25 DEBUG SClient4WPS:649 - Parameters: [ObjectParameter [type=java.lang.String, defaultValue=hcaf_filtered, value=null, name=Table_Label, description=the name of the Filtered Hcaf [Min N. of Entries:1; Max N. of Entries:1; default:hcaf_filtered], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=-17.098, value=null, name=B_Box_Left_Lower_Lat, description=the left lower latitude of the bounding box (range [-90,+90]) [Min N. of Entries:1; Max N. of Entries:1; default:-17.098], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=89.245, value=null, name=B_Box_Left_Lower_Long, description=the left lower longitude of the bounding box (range [-180,+180]) [Min N. of Entries:1; Max N. of Entries:1; default:89.245], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=25.086, value=null, name=B_Box_Right_Upper_Lat, description=the right upper latitude of the bounding box (range [-90,+90]) [Min N. of Entries:1; Max N. of Entries:1; default:25.086], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=147.642, value=null, name=B_Box_Right_Upper_Long, description=the right upper longitude of the bounding box (range [-180,+180]) [Min N. of Entries:1; Max N. of Entries:1; default:147.642], typology=OBJECT]] +2016-04-11 09:35:15 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-11 09:35:15 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-11 09:35:15 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-11 09:35:15 INFO SessionUtil:49 - no user found in session, use test user +2016-04-11 09:35:15 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-11 09:35:15 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:35:15 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-11 09:35:15 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@1778565 +2016-04-11 09:35:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:35:15 INFO ASLSession:352 - Logging the entrance +2016-04-11 09:35:15 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-11 09:35:15 DEBUG TemplateModel:83 - 2016-04-11 09:35:15, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-11 09:35:15 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:35:15 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-11 09:35:18 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-11 09:35:18 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-11 09:35:18 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-11 09:35:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:35:18 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:35:18 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:35:18 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:35:19 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 124 ms +2016-04-11 09:35:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-11 09:35:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-11 09:35:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-11 09:35:19 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-11 09:35:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-11 09:35:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-11 09:35:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-11 09:35:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-11 09:35:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-11 09:35:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-11 09:35:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-11 09:35:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-11 09:35:19 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-11 09:35:19 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-11 09:35:19 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-11 09:35:19 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:35:19 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:35:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@6bb8fd4b +2016-04-11 09:35:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@50555fae +2016-04-11 09:35:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@721f66c3 +2016-04-11 09:35:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@35a46242 +2016-04-11 09:35:19 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 110 ms +2016-04-11 09:35:19 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-11 09:35:19 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-11 09:35:19 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-11 09:35:19 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-11 09:35:19 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-11 09:35:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:35:19 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:35:19 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-11 09:35:19 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-11 09:35:19 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-11 09:35:19 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:35:19 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-11 09:35:19 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:35:20 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:35:20 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-11 09:35:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:35:23 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:35:23 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:35:23 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY +2016-04-11 09:35:23 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:35:23 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:35:23 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY + CMSY + An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner. 
+ + + IDsFile + Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK + Name of the parameter: IDsFile. Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK + + + + + + + StocksFile + Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. Example: http://goo.gl/Mp2ZLY + Name of the parameter: StocksFile. Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. Example: http://goo.gl/Mp2ZLY + + + + + + + SelectedStock + The stock on which the procedure has to focus e.g. HLH_M07 + Name of the parameter: SelectedStock. The stock on which the procedure has to focus e.g. HLH_M07 + + + + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:35:23 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:35:23 DEBUG SClient4WPS:290 - WPSClient->Input: + IDsFile + Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK + Name of the parameter: IDsFile. Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK + + + + + +2016-04-11 09:35:23 DEBUG SClient4WPS:290 - WPSClient->Input: + StocksFile + Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. Example: http://goo.gl/Mp2ZLY + Name of the parameter: StocksFile. Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. Example: http://goo.gl/Mp2ZLY + + + + + +2016-04-11 09:35:23 DEBUG SClient4WPS:290 - WPSClient->Input: + SelectedStock + The stock on which the procedure has to focus e.g. 
HLH_M07 + Name of the parameter: SelectedStock. The stock on which the procedure has to focus e.g. HLH_M07 + + + + + +2016-04-11 09:35:23 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:35:23 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:35:23 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:35:23 DEBUG WPS2SM:254 - Conversion to SM Type->IDsFile is a Literal Input +2016-04-11 09:35:23 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:23 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:35:23 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:35:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK +2016-04-11 09:35:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:IDsFile +2016-04-11 09:35:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue= , value=null, name=IDsFile, description=Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK [Min N. of Entries:1; Max N. 
of Entries:1], typology=OBJECT] +2016-04-11 09:35:23 DEBUG WPS2SM:254 - Conversion to SM Type->StocksFile is a Literal Input +2016-04-11 09:35:23 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:23 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:35:23 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:35:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. Example: http://goo.gl/Mp2ZLY +2016-04-11 09:35:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:StocksFile +2016-04-11 09:35:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue= , value=null, name=StocksFile, description=Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. Example: http://goo.gl/Mp2ZLY [Min N. of Entries:1; Max N. of Entries:1], typology=OBJECT] +2016-04-11 09:35:23 DEBUG WPS2SM:254 - Conversion to SM Type->SelectedStock is a Literal Input +2016-04-11 09:35:23 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:23 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:35:23 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:35:23 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The stock on which the procedure has to focus e.g. HLH_M07 +2016-04-11 09:35:23 DEBUG WPS2SM:291 - Conversion to SM Type->Name:SelectedStock +2016-04-11 09:35:23 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:23 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue= , value=null, name=SelectedStock, description=The stock on which the procedure has to focus e.g. HLH_M07 [Min N. of Entries:1; Max N. 
of Entries:1], typology=OBJECT] +2016-04-11 09:35:23 DEBUG SClient4WPS:649 - Parameters: [ObjectParameter [type=java.lang.String, defaultValue= , value=null, name=IDsFile, description=Http link to a file containing prior information about the stocks, in WKLife IV format. Example: http://goo.gl/9rg3qK [Min N. of Entries:1; Max N. of Entries:1], typology=OBJECT], ObjectParameter [type=java.lang.String, defaultValue= , value=null, name=StocksFile, description=Http link to a file containing catch and biomass (or CPUE) trends , in WKLife IV format. Example: http://goo.gl/Mp2ZLY [Min N. of Entries:1; Max N. of Entries:1], typology=OBJECT], ObjectParameter [type=java.lang.String, defaultValue= , value=null, name=SelectedStock, description=The stock on which the procedure has to focus e.g. HLH_M07 [Min N. of Entries:1; Max N. of Entries:1], typology=OBJECT]] +2016-04-11 09:35:25 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:35:25 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:35:25 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:35:25 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING +2016-04-11 09:35:25 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:35:25 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:35:26 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING + MAX_ENT_NICHE_MODELLING + A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. 
In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt + + + OutputTableLabel + The name of the table to produce + Name of the parameter: OutputTableLabel. The name of the table to produce + + + + maxent_ + + + + SpeciesName + The name of the species to model and the occurrence records refer to + Name of the parameter: SpeciesName. The name of the species to model and the occurrence records refer to + + + + generic_species + + + + MaxIterations + The number of learning iterations of the MaxEnt algorithm + Name of the parameter: MaxIterations. The number of learning iterations of the MaxEnt algorithm + + + + 1000 + + + + DefaultPrevalence + A priori probability of presence at ordinary occurrence points + Name of the parameter: DefaultPrevalence. 
A priori probability of presence at ordinary occurrence points + + + + 0.5 + + + + OccurrencesTable + A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + Name of the parameter: OccurrencesTable. A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + LongitudeColumn + The column containing longitude values [the name of a column from OccurrencesTable] + Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrencesTable] + + + + decimallongitude + + + + LatitudeColumn + The column containing latitude values [the name of a column from OccurrencesTable] + Name of the parameter: LatitudeColumn. The column containing latitude values [the name of a column from OccurrencesTable] + + + + decimallatitude + + + + XResolution + Model projection resolution on the X axis in decimal degrees + Name of the parameter: XResolution. Model projection resolution on the X axis in decimal degrees + + + + 1 + + + + YResolution + Model projection resolution on the Y axis in decimal degrees + Name of the parameter: YResolution. Model projection resolution on the Y axis in decimal degrees + + + + 1 + + + + Layers + The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. 
Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + + + + + + + Z + Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + Name of the parameter: Z. Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + + + + 0 + + + + TimeIndex + Time Index. The default is the first time indexed in the input environmental datasets + Name of the parameter: TimeIndex. Time Index. The default is the first time indexed in the input environmental datasets + + + + 0 + + + + + + Best Threshold + Best threshold for transforming MaxEnt values into 0/1 probability assignments + Name of the parameter: Best Threshold. Best threshold for transforming MaxEnt values into 0/1 probability assignments + + + + + + Estimated Prevalence + The a posteriori estimated prevalence of the species + Name of the parameter: Estimated Prevalence. The a posteriori estimated prevalence of the species + + + + + + Variables contributions + The contribution of each variable to the MaxEnt values estimates + Name of the parameter: Variables contributions. 
The contribution of each variable to the MaxEnt values estimates + + + + + + Variables Permutations Importance + The importance of the permutations of the variables during the training + Name of the parameter: Variables Permutations Importance. The importance of the permutations of the variables during the training + + + + + + ASCII Maps of the environmental layers for checking features aligments + ASCII Maps of the environmental layers for checking features aligments + Name of the parameter: ASCII Maps of the environmental layers for checking features aligments. ASCII Maps of the environmental layers for checking features aligments + + + + application/d4science + + + + + application/d4science + + + + + + OutputTable7 + Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OutputTable7. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:35:26 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:35:26 DEBUG SClient4WPS:290 - WPSClient->Input: + OutputTableLabel + The name of the table to produce + Name of the parameter: OutputTableLabel. The name of the table to produce + + + + maxent_ + + +2016-04-11 09:35:26 DEBUG SClient4WPS:290 - WPSClient->Input: + SpeciesName + The name of the species to model and the occurrence records refer to + Name of the parameter: SpeciesName. 
The name of the species to model and the occurrence records refer to + + + + generic_species + + +2016-04-11 09:35:26 DEBUG SClient4WPS:290 - WPSClient->Input: + MaxIterations + The number of learning iterations of the MaxEnt algorithm + Name of the parameter: MaxIterations. The number of learning iterations of the MaxEnt algorithm + + + + 1000 + + +2016-04-11 09:35:26 DEBUG SClient4WPS:290 - WPSClient->Input: + DefaultPrevalence + A priori probability of presence at ordinary occurrence points + Name of the parameter: DefaultPrevalence. A priori probability of presence at ordinary occurrence points + + + + 0.5 + + +2016-04-11 09:35:26 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencesTable + A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + Name of the parameter: OccurrencesTable. A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:35:26 DEBUG SClient4WPS:290 - WPSClient->Input: + LongitudeColumn + The column containing longitude values [the name of a column from OccurrencesTable] + Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrencesTable] + + + + decimallongitude + + +2016-04-11 09:35:26 DEBUG SClient4WPS:290 - WPSClient->Input: + LatitudeColumn + The column containing latitude values [the name of a column from OccurrencesTable] + Name of the parameter: LatitudeColumn. 
The column containing latitude values [the name of a column from OccurrencesTable] + + + + decimallatitude + + +2016-04-11 09:35:26 DEBUG SClient4WPS:290 - WPSClient->Input: + XResolution + Model projection resolution on the X axis in decimal degrees + Name of the parameter: XResolution. Model projection resolution on the X axis in decimal degrees + + + + 1 + + +2016-04-11 09:35:26 DEBUG SClient4WPS:290 - WPSClient->Input: + YResolution + Model projection resolution on the Y axis in decimal degrees + Name of the parameter: YResolution. Model projection resolution on the Y axis in decimal degrees + + + + 1 + + +2016-04-11 09:35:26 DEBUG SClient4WPS:290 - WPSClient->Input: + Layers + The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + + + + + +2016-04-11 09:35:26 DEBUG SClient4WPS:290 - WPSClient->Input: + Z + Value of Z. 
Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + Name of the parameter: Z. Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + + + + 0 + + +2016-04-11 09:35:26 DEBUG SClient4WPS:290 - WPSClient->Input: + TimeIndex + Time Index. The default is the first time indexed in the input environmental datasets + Name of the parameter: TimeIndex. Time Index. The default is the first time indexed in the input environmental datasets + + + + 0 + + +2016-04-11 09:35:26 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:35:26 DEBUG SClient4WPS:297 - WPSClient->Output: + Best Threshold + Best threshold for transforming MaxEnt values into 0/1 probability assignments + Name of the parameter: Best Threshold. Best threshold for transforming MaxEnt values into 0/1 probability assignments + + + + +2016-04-11 09:35:26 DEBUG SClient4WPS:297 - WPSClient->Output: + Estimated Prevalence + The a posteriori estimated prevalence of the species + Name of the parameter: Estimated Prevalence. The a posteriori estimated prevalence of the species + + + + +2016-04-11 09:35:26 DEBUG SClient4WPS:297 - WPSClient->Output: + Variables contributions + The contribution of each variable to the MaxEnt values estimates + Name of the parameter: Variables contributions. The contribution of each variable to the MaxEnt values estimates + + + + +2016-04-11 09:35:26 DEBUG SClient4WPS:297 - WPSClient->Output: + Variables Permutations Importance + The importance of the permutations of the variables during the training + Name of the parameter: Variables Permutations Importance. 
The importance of the permutations of the variables during the training + + + + +2016-04-11 09:35:26 DEBUG SClient4WPS:297 - WPSClient->Output: + ASCII Maps of the environmental layers for checking features aligments + ASCII Maps of the environmental layers for checking features aligments + Name of the parameter: ASCII Maps of the environmental layers for checking features aligments. ASCII Maps of the environmental layers for checking features aligments + + + + application/d4science + + + + + application/d4science + + + + +2016-04-11 09:35:26 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable7 + Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OutputTable7. Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:35:26 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:35:26 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:35:26 DEBUG WPS2SM:254 - Conversion to SM Type->OutputTableLabel is a Literal Input +2016-04-11 09:35:26 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:26 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:35:26 DEBUG WPS2SM:101 - Guessed default value: maxent_ +2016-04-11 09:35:26 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The name of the table to produce +2016-04-11 09:35:26 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OutputTableLabel +2016-04-11 09:35:26 DEBUG WPS2SM:292 - Conversion to SM 
Type->Number of Inputs to Manage:1 +2016-04-11 09:35:26 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=maxent_, value=null, name=OutputTableLabel, description=The name of the table to produce [Min N. of Entries:1; Max N. of Entries:1; default:maxent_], typology=OBJECT] +2016-04-11 09:35:26 DEBUG WPS2SM:254 - Conversion to SM Type->SpeciesName is a Literal Input +2016-04-11 09:35:26 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:26 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:35:26 DEBUG WPS2SM:101 - Guessed default value: generic_species +2016-04-11 09:35:26 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The name of the species to model and the occurrence records refer to +2016-04-11 09:35:26 DEBUG WPS2SM:291 - Conversion to SM Type->Name:SpeciesName +2016-04-11 09:35:26 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:26 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=generic_species, value=null, name=SpeciesName, description=The name of the species to model and the occurrence records refer to [Min N. of Entries:1; Max N. 
of Entries:1; default:generic_species], typology=OBJECT] +2016-04-11 09:35:26 DEBUG WPS2SM:254 - Conversion to SM Type->MaxIterations is a Literal Input +2016-04-11 09:35:26 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:26 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:35:26 DEBUG WPS2SM:101 - Guessed default value: 1000 +2016-04-11 09:35:26 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The number of learning iterations of the MaxEnt algorithm +2016-04-11 09:35:26 DEBUG WPS2SM:291 - Conversion to SM Type->Name:MaxIterations +2016-04-11 09:35:26 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:26 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1000, value=null, name=MaxIterations, description=The number of learning iterations of the MaxEnt algorithm [Min N. of Entries:1; Max N. of Entries:1; default:1000], typology=OBJECT] +2016-04-11 09:35:26 DEBUG WPS2SM:254 - Conversion to SM Type->DefaultPrevalence is a Literal Input +2016-04-11 09:35:26 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:26 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-11 09:35:26 DEBUG WPS2SM:101 - Guessed default value: 0.5 +2016-04-11 09:35:26 DEBUG WPS2SM:290 - Conversion to SM Type->Title:A priori probability of presence at ordinary occurrence points +2016-04-11 09:35:26 DEBUG WPS2SM:291 - Conversion to SM Type->Name:DefaultPrevalence +2016-04-11 09:35:26 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:26 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=0.5, value=null, name=DefaultPrevalence, description=A priori probability of presence at ordinary occurrence points [Min N. of Entries:1; Max N. 
of Entries:1; default:0.5], typology=OBJECT] +2016-04-11 09:35:26 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencesTable is a Complex Input +2016-04-11 09:35:26 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:35:26 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:35:26 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:35:26 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:35:26 DEBUG WPS2SM:290 - Conversion to SM Type->Title:A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] +2016-04-11 09:35:26 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencesTable +2016-04-11 09:35:26 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:26 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencesTable, description=A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:35:26 DEBUG WPS2SM:254 - Conversion to SM Type->LongitudeColumn is a Literal Input +2016-04-11 09:35:26 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:26 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:35:26 DEBUG WPS2SM:101 - Guessed default value: decimallongitude +2016-04-11 09:35:26 DEBUG WPS2SM:130 - Machter title: The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallongitude] +2016-04-11 09:35:26 DEBUG WPS2SM:131 - Machter find: true +2016-04-11 09:35:26 DEBUG WPS2SM:132 - Machter group: the name of a column from OccurrencesTable +2016-04-11 09:35:26 DEBUG WPS2SM:133 - Machter start: 40 +2016-04-11 09:35:26 DEBUG WPS2SM:134 - Machter end: 82 +2016-04-11 09:35:26 DEBUG WPS2SM:135 - Machter Group Count: 1 +2016-04-11 09:35:26 DEBUG WPS2SM:137 - Matcher referredTabularParameterName: OccurrencesTable +2016-04-11 09:35:26 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The column containing longitude values [the name of a column from OccurrencesTable] +2016-04-11 09:35:26 DEBUG WPS2SM:291 - Conversion to SM Type->Name:LongitudeColumn +2016-04-11 09:35:26 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:26 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=LongitudeColumn, description=The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallongitude], typology=COLUMN] +2016-04-11 09:35:26 DEBUG WPS2SM:254 - Conversion to SM Type->LatitudeColumn is a Literal Input +2016-04-11 09:35:26 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:26 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:35:26 DEBUG WPS2SM:101 - Guessed default value: decimallatitude +2016-04-11 09:35:26 DEBUG WPS2SM:130 - Machter title: The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallatitude] +2016-04-11 09:35:26 DEBUG WPS2SM:131 - Machter find: true +2016-04-11 09:35:26 DEBUG WPS2SM:132 - Machter group: the name of a column from OccurrencesTable +2016-04-11 09:35:26 DEBUG WPS2SM:133 - Machter start: 39 +2016-04-11 09:35:26 DEBUG WPS2SM:134 - Machter end: 81 +2016-04-11 09:35:26 DEBUG WPS2SM:135 - Machter Group Count: 1 +2016-04-11 09:35:26 DEBUG WPS2SM:137 - Matcher referredTabularParameterName: OccurrencesTable +2016-04-11 09:35:26 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The column containing latitude values [the name of a column from OccurrencesTable] +2016-04-11 09:35:26 DEBUG WPS2SM:291 - Conversion to SM Type->Name:LatitudeColumn +2016-04-11 09:35:26 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:26 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=LatitudeColumn, description=The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallatitude], typology=COLUMN] +2016-04-11 09:35:26 DEBUG WPS2SM:254 - Conversion to SM Type->XResolution is a Literal Input +2016-04-11 09:35:26 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:26 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-11 09:35:26 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:35:26 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Model projection resolution on the X axis in decimal degrees +2016-04-11 09:35:26 DEBUG WPS2SM:291 - Conversion to SM Type->Name:XResolution +2016-04-11 09:35:26 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:26 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=XResolution, description=Model projection resolution on the X axis in decimal degrees [Min N. of Entries:1; Max N. 
of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:35:26 DEBUG WPS2SM:254 - Conversion to SM Type->YResolution is a Literal Input +2016-04-11 09:35:26 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:26 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-11 09:35:26 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:35:26 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Model projection resolution on the Y axis in decimal degrees +2016-04-11 09:35:26 DEBUG WPS2SM:291 - Conversion to SM Type->Name:YResolution +2016-04-11 09:35:26 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:26 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=YResolution, description=Model projection resolution on the Y axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:35:26 DEBUG WPS2SM:254 - Conversion to SM Type->Layers is a Literal Input +2016-04-11 09:35:26 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:26 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:35:26 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:35:26 DEBUG WPS2SM:147 - Machter title: The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:35:26 DEBUG WPS2SM:148 - Machter find: true +2016-04-11 09:35:26 DEBUG WPS2SM:149 - Machter group: a sequence of values separated by | +2016-04-11 09:35:26 DEBUG WPS2SM:150 - Machter start: 501 +2016-04-11 09:35:26 DEBUG WPS2SM:151 - Machter end: 536 +2016-04-11 09:35:26 DEBUG WPS2SM:152 - Machter Group Count: 1 +2016-04-11 09:35:26 DEBUG WPS2SM:155 - Matcher separator: | +2016-04-11 09:35:26 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) +2016-04-11 09:35:26 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Layers +2016-04-11 09:35:26 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:26 DEBUG SClient4WPS:645 - InputParameter: ListParameter [type=java.lang.String, value=null, separator=|, name=Layers, description=The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. 
of Entries:1], typology=LIST] +2016-04-11 09:35:26 DEBUG WPS2SM:254 - Conversion to SM Type->Z is a Literal Input +2016-04-11 09:35:26 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:26 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-11 09:35:26 DEBUG WPS2SM:101 - Guessed default value: 0 +2016-04-11 09:35:26 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer +2016-04-11 09:35:26 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Z +2016-04-11 09:35:26 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:26 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=0, value=null, name=Z, description=Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT] +2016-04-11 09:35:26 DEBUG WPS2SM:254 - Conversion to SM Type->TimeIndex is a Literal Input +2016-04-11 09:35:26 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:26 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:35:26 DEBUG WPS2SM:101 - Guessed default value: 0 +2016-04-11 09:35:26 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Time Index. The default is the first time indexed in the input environmental datasets +2016-04-11 09:35:26 DEBUG WPS2SM:291 - Conversion to SM Type->Name:TimeIndex +2016-04-11 09:35:26 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:26 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex, description=Time Index. The default is the first time indexed in the input environmental datasets [Min N. of Entries:1; Max N. 
of Entries:1; default:0], typology=OBJECT] +2016-04-11 09:35:26 DEBUG SClient4WPS:649 - Parameters: [ObjectParameter [type=java.lang.String, defaultValue=maxent_, value=null, name=OutputTableLabel, description=The name of the table to produce [Min N. of Entries:1; Max N. of Entries:1; default:maxent_], typology=OBJECT], ObjectParameter [type=java.lang.String, defaultValue=generic_species, value=null, name=SpeciesName, description=The name of the species to model and the occurrence records refer to [Min N. of Entries:1; Max N. of Entries:1; default:generic_species], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1000, value=null, name=MaxIterations, description=The number of learning iterations of the MaxEnt algorithm [Min N. of Entries:1; Max N. of Entries:1; default:1000], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=0.5, value=null, name=DefaultPrevalence, description=A priori probability of presence at ordinary occurrence points [Min N. of Entries:1; Max N. of Entries:1; default:0.5], typology=OBJECT], TabularParameter [tableName= , templates=[], name=OccurrencesTable, description=A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=LongitudeColumn, description=The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallongitude], typology=COLUMN], Parameter [name=LatitudeColumn, description=The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallatitude], typology=COLUMN], ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=XResolution, description=Model projection resolution on the X axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=YResolution, description=Model projection resolution on the Y axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ListParameter [type=java.lang.String, value=null, separator=|, name=Layers, description=The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1], typology=LIST], ObjectParameter [type=java.lang.Double, defaultValue=0, value=null, name=Z, description=Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex, description=Time Index. The default is the first time indexed in the input environmental datasets [Min N. of Entries:1; Max N. 
of Entries:1; default:0], typology=OBJECT]] +2016-04-11 09:35:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:35:26 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:35:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:35:26 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:35:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:35:26 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:35:26 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:35:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:35:26 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:35:26 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:35:26 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:35:26 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:35:26 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:35:26 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:35:26 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:35:26 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-11 09:35:26 DEBUG JCRRepository:271 - Initialize repository +2016-04-11 09:35:26 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-11 09:35:26 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 36 +2016-04-11 09:35:26 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:35:26 INFO ICClient:75 - executing 
query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-11 09:35:26 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 17 ms +2016-04-11 09:35:27 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is null?false +2016-04-11 09:35:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:35:27 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:35:27 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:35:27 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:35:27 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:35:27 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:35:27 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-11 09:35:27 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:35:27 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-11 09:35:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:35:27 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:35:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:35:27 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:35:27 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:35:27 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:35:27 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:35:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:35:27 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:35:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:35:27 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:35:27 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:35:27 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:35:27 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:35:27 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:35:27 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:35:27 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-11 09:35:27 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:35:27 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:35:27 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-11 09:35:27 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-11 09:35:27 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:35:27 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:35:27 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:35:27 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:35:27 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:35:27 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:35:27 INFO 
JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:35:27 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:35:27 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:35:27 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:35:27 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:35:27 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:35:27 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:35:27 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:35:27 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:35:27 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:35:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:35:27 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:35:27 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:35:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:35:27 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:35:27 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:35:27 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:35:27 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:35:27 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:35:27 INFO JCRServlets:238 - Calling Servlet GetItemByPath 
/Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:35:27 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:35:27 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:35:27 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:35:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:35:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:35:28 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:35:28 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:35:28 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 31 ms +2016-04-11 09:35:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:35:28 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:35:28 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where 
($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-11 09:35:28 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-11 09:35:28 DEBUG StorageClient:316 - server not set. Try to query IS in scope: /gcube +2016-04-11 09:35:28 INFO ISClientConnector:82 - found only one RR, take it +2016-04-11 09:35:28 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-11 09:35:28 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-11 09:35:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:35:28 DEBUG StorageClient:517 - set scope: /gcube +2016-04-11 09:35:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:35:28 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:35:28 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:35:28 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 16 ms +2016-04-11 09:35:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube in 
thread 35 +2016-04-11 09:35:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:35:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:35:28 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:35:28 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:35:28 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:35:28 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:35:28 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:35:28 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:35:28 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:35:28 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:35:28 INFO WorkspaceExplorerServiceImpl:142 - end time - 410 msc 0 sec +2016-04-11 09:35:28 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:35:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:35:35 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:35:35 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:35:35 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART +2016-04-11 09:35:35 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:35:35 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:35:35 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART + GEO_CHART + An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country. 
+ + + InputTable + The input table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: InputTable. The input table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + Longitude + The column containing longitude decimal values [the name of a column from InputTable] + Name of the parameter: Longitude. The column containing longitude decimal values [the name of a column from InputTable] + + + + long + + + + Latitude + The column containing latitude decimal values [the name of a column from InputTable] + Name of the parameter: Latitude. The column containing latitude decimal values [the name of a column from InputTable] + + + + lat + + + + Quantities + The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ] + Name of the parameter: Quantities. The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ] + + + + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:35:35 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:35:35 DEBUG SClient4WPS:290 - WPSClient->Input: + InputTable + The input table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: InputTable. 
The input table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:35:35 DEBUG SClient4WPS:290 - WPSClient->Input: + Longitude + The column containing longitude decimal values [the name of a column from InputTable] + Name of the parameter: Longitude. The column containing longitude decimal values [the name of a column from InputTable] + + + + long + + +2016-04-11 09:35:35 DEBUG SClient4WPS:290 - WPSClient->Input: + Latitude + The column containing latitude decimal values [the name of a column from InputTable] + Name of the parameter: Latitude. The column containing latitude decimal values [the name of a column from InputTable] + + + + lat + + +2016-04-11 09:35:35 DEBUG SClient4WPS:290 - WPSClient->Input: + Quantities + The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ] + Name of the parameter: Quantities. 
The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ] + + + + + +2016-04-11 09:35:35 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:35:35 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:35:35 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:35:35 DEBUG WPS2SM:279 - Conversion to SM Type->InputTable is a Complex Input +2016-04-11 09:35:35 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:35:35 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:35:35 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:35:35 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:35:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The input table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:35:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:InputTable +2016-04-11 09:35:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:35 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=InputTable, description=The input table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. 
of Entries:1], typology=TABULAR] +2016-04-11 09:35:35 DEBUG WPS2SM:254 - Conversion to SM Type->Longitude is a Literal Input +2016-04-11 09:35:35 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:35:35 DEBUG WPS2SM:101 - Guessed default value: long +2016-04-11 09:35:35 DEBUG WPS2SM:130 - Machter title: The column containing longitude decimal values [the name of a column from InputTable] [Min N. of Entries:1; Max N. of Entries:1; default:long] +2016-04-11 09:35:35 DEBUG WPS2SM:131 - Machter find: true +2016-04-11 09:35:35 DEBUG WPS2SM:132 - Machter group: the name of a column from InputTable +2016-04-11 09:35:35 DEBUG WPS2SM:133 - Machter start: 48 +2016-04-11 09:35:35 DEBUG WPS2SM:134 - Machter end: 84 +2016-04-11 09:35:35 DEBUG WPS2SM:135 - Machter Group Count: 1 +2016-04-11 09:35:35 DEBUG WPS2SM:137 - Matcher referredTabularParameterName: InputTable +2016-04-11 09:35:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The column containing longitude decimal values [the name of a column from InputTable] +2016-04-11 09:35:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Longitude +2016-04-11 09:35:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:35 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=Longitude, description=The column containing longitude decimal values [the name of a column from InputTable] [Min N. of Entries:1; Max N. of Entries:1; default:long], typology=COLUMN] +2016-04-11 09:35:35 DEBUG WPS2SM:254 - Conversion to SM Type->Latitude is a Literal Input +2016-04-11 09:35:35 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:35:35 DEBUG WPS2SM:101 - Guessed default value: lat +2016-04-11 09:35:35 DEBUG WPS2SM:130 - Machter title: The column containing latitude decimal values [the name of a column from InputTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:lat] +2016-04-11 09:35:35 DEBUG WPS2SM:131 - Machter find: true +2016-04-11 09:35:35 DEBUG WPS2SM:132 - Machter group: the name of a column from InputTable +2016-04-11 09:35:35 DEBUG WPS2SM:133 - Machter start: 47 +2016-04-11 09:35:35 DEBUG WPS2SM:134 - Machter end: 83 +2016-04-11 09:35:35 DEBUG WPS2SM:135 - Machter Group Count: 1 +2016-04-11 09:35:35 DEBUG WPS2SM:137 - Matcher referredTabularParameterName: InputTable +2016-04-11 09:35:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The column containing latitude decimal values [the name of a column from InputTable] +2016-04-11 09:35:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Latitude +2016-04-11 09:35:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:35 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=Latitude, description=The column containing latitude decimal values [the name of a column from InputTable] [Min N. of Entries:1; Max N. of Entries:1; default:lat], typology=COLUMN] +2016-04-11 09:35:35 DEBUG WPS2SM:254 - Conversion to SM Type->Quantities is a Literal Input +2016-04-11 09:35:35 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:35:35 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:35:35 DEBUG WPS2SM:111 - Machter title: The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:35:35 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:35:35 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from InputTable separated by | +2016-04-11 09:35:35 DEBUG WPS2SM:114 - Machter start: 38 +2016-04-11 09:35:35 DEBUG WPS2SM:115 - Machter end: 99 +2016-04-11 09:35:35 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:35:35 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: InputTable +2016-04-11 09:35:35 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:35:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ] +2016-04-11 09:35:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Quantities +2016-04-11 09:35:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:35 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=Quantities, description=The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:35:35 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=InputTable, description=The input table [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=Longitude, description=The column containing longitude decimal values [the name of a column from InputTable] [Min N. of Entries:1; Max N. of Entries:1; default:long], typology=COLUMN], Parameter [name=Latitude, description=The column containing latitude decimal values [the name of a column from InputTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:lat], typology=COLUMN], Parameter [name=Quantities, description=The numeric quantities to visualize [a sequence of names of columns from InputTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST]] +2016-04-11 09:35:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:35:35 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:35:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:35:35 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:35:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:35:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:35:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:35:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:35:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:35:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:35:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:35:35 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:35:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:35:35 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:35:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:35:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:35:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:35:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:35:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:35:35 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:35:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:35:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:35:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:35:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:35:35 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:35:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:35:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:35:35 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:35:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:35:35 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:35:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:35:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:35:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:35:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:35:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:35:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:35:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:35:35 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:35:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:35:35 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:35:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:35:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:35:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:35:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:35:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:35:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:35:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:35:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:35:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:35:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:35:35 DEBUG DefaultScopeProvider:38 - setting scope 
/gcube/devsec/devVRE in thread 35 +2016-04-11 09:35:35 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:35:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:35:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:35:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:35:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:35:35 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:35:36 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:35:36 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:35:36 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:35:36 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:35:36 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:35:36 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:35:36 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:35:36 INFO WorkspaceExplorerServiceImpl:142 - end time - 183 msc 0 sec +2016-04-11 09:35:36 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:35:48 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:35:48 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:35:48 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:35:48 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING +2016-04-11 09:35:48 DEBUG SClient4WPS:263 - Describe Process WPS URL: 
http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:35:48 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:35:48 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING + MAX_ENT_NICHE_MODELLING + A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt + + + OutputTableLabel + The name of the table to produce + Name of the parameter: OutputTableLabel. The name of the table to produce + + + + maxent_ + + + + SpeciesName + The name of the species to model and the occurrence records refer to + Name of the parameter: SpeciesName. 
The name of the species to model and the occurrence records refer to + + + + generic_species + + + + MaxIterations + The number of learning iterations of the MaxEnt algorithm + Name of the parameter: MaxIterations. The number of learning iterations of the MaxEnt algorithm + + + + 1000 + + + + DefaultPrevalence + A priori probability of presence at ordinary occurrence points + Name of the parameter: DefaultPrevalence. A priori probability of presence at ordinary occurrence points + + + + 0.5 + + + + OccurrencesTable + A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + Name of the parameter: OccurrencesTable. A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + LongitudeColumn + The column containing longitude values [the name of a column from OccurrencesTable] + Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrencesTable] + + + + decimallongitude + + + + LatitudeColumn + The column containing latitude values [the name of a column from OccurrencesTable] + Name of the parameter: LatitudeColumn. The column containing latitude values [the name of a column from OccurrencesTable] + + + + decimallatitude + + + + XResolution + Model projection resolution on the X axis in decimal degrees + Name of the parameter: XResolution. Model projection resolution on the X axis in decimal degrees + + + + 1 + + + + YResolution + Model projection resolution on the Y axis in decimal degrees + Name of the parameter: YResolution. 
Model projection resolution on the Y axis in decimal degrees + + + + 1 + + + + Layers + The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + + + + + + + Z + Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + Name of the parameter: Z. Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + + + + 0 + + + + TimeIndex + Time Index. The default is the first time indexed in the input environmental datasets + Name of the parameter: TimeIndex. Time Index. The default is the first time indexed in the input environmental datasets + + + + 0 + + + + + + Best Threshold + Best threshold for transforming MaxEnt values into 0/1 probability assignments + Name of the parameter: Best Threshold. 
Best threshold for transforming MaxEnt values into 0/1 probability assignments + + + + + + Estimated Prevalence + The a posteriori estimated prevalence of the species + Name of the parameter: Estimated Prevalence. The a posteriori estimated prevalence of the species + + + + + + Variables contributions + The contribution of each variable to the MaxEnt values estimates + Name of the parameter: Variables contributions. The contribution of each variable to the MaxEnt values estimates + + + + + + Variables Permutations Importance + The importance of the permutations of the variables during the training + Name of the parameter: Variables Permutations Importance. The importance of the permutations of the variables during the training + + + + + + ASCII Maps of the environmental layers for checking features aligments + ASCII Maps of the environmental layers for checking features aligments + Name of the parameter: ASCII Maps of the environmental layers for checking features aligments. ASCII Maps of the environmental layers for checking features aligments + + + + application/d4science + + + + + application/d4science + + + + + + OutputTable7 + Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OutputTable7. 
Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:35:48 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:35:48 DEBUG SClient4WPS:290 - WPSClient->Input: + OutputTableLabel + The name of the table to produce + Name of the parameter: OutputTableLabel. The name of the table to produce + + + + maxent_ + + +2016-04-11 09:35:48 DEBUG SClient4WPS:290 - WPSClient->Input: + SpeciesName + The name of the species to model and the occurrence records refer to + Name of the parameter: SpeciesName. The name of the species to model and the occurrence records refer to + + + + generic_species + + +2016-04-11 09:35:48 DEBUG SClient4WPS:290 - WPSClient->Input: + MaxIterations + The number of learning iterations of the MaxEnt algorithm + Name of the parameter: MaxIterations. The number of learning iterations of the MaxEnt algorithm + + + + 1000 + + +2016-04-11 09:35:48 DEBUG SClient4WPS:290 - WPSClient->Input: + DefaultPrevalence + A priori probability of presence at ordinary occurrence points + Name of the parameter: DefaultPrevalence. A priori probability of presence at ordinary occurrence points + + + + 0.5 + + +2016-04-11 09:35:48 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencesTable + A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + Name of the parameter: OccurrencesTable. 
A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:35:48 DEBUG SClient4WPS:290 - WPSClient->Input: + LongitudeColumn + The column containing longitude values [the name of a column from OccurrencesTable] + Name of the parameter: LongitudeColumn. The column containing longitude values [the name of a column from OccurrencesTable] + + + + decimallongitude + + +2016-04-11 09:35:48 DEBUG SClient4WPS:290 - WPSClient->Input: + LatitudeColumn + The column containing latitude values [the name of a column from OccurrencesTable] + Name of the parameter: LatitudeColumn. The column containing latitude values [the name of a column from OccurrencesTable] + + + + decimallatitude + + +2016-04-11 09:35:48 DEBUG SClient4WPS:290 - WPSClient->Input: + XResolution + Model projection resolution on the X axis in decimal degrees + Name of the parameter: XResolution. Model projection resolution on the X axis in decimal degrees + + + + 1 + + +2016-04-11 09:35:48 DEBUG SClient4WPS:290 - WPSClient->Input: + YResolution + Model projection resolution on the Y axis in decimal degrees + Name of the parameter: YResolution. Model projection resolution on the Y axis in decimal degrees + + + + 1 + + +2016-04-11 09:35:48 DEBUG SClient4WPS:290 - WPSClient->Input: + Layers + The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. 
https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + Name of the parameter: Layers. The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) + + + + + +2016-04-11 09:35:48 DEBUG SClient4WPS:290 - WPSClient->Input: + Z + Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + Name of the parameter: Z. Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer + + + + 0 + + +2016-04-11 09:35:48 DEBUG SClient4WPS:290 - WPSClient->Input: + TimeIndex + Time Index. The default is the first time indexed in the input environmental datasets + Name of the parameter: TimeIndex. Time Index. The default is the first time indexed in the input environmental datasets + + + + 0 + + +2016-04-11 09:35:48 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:35:48 DEBUG SClient4WPS:297 - WPSClient->Output: + Best Threshold + Best threshold for transforming MaxEnt values into 0/1 probability assignments + Name of the parameter: Best Threshold. Best threshold for transforming MaxEnt values into 0/1 probability assignments + + + + +2016-04-11 09:35:48 DEBUG SClient4WPS:297 - WPSClient->Output: + Estimated Prevalence + The a posteriori estimated prevalence of the species + Name of the parameter: Estimated Prevalence. 
The a posteriori estimated prevalence of the species + + + + +2016-04-11 09:35:48 DEBUG SClient4WPS:297 - WPSClient->Output: + Variables contributions + The contribution of each variable to the MaxEnt values estimates + Name of the parameter: Variables contributions. The contribution of each variable to the MaxEnt values estimates + + + + +2016-04-11 09:35:48 DEBUG SClient4WPS:297 - WPSClient->Output: + Variables Permutations Importance + The importance of the permutations of the variables during the training + Name of the parameter: Variables Permutations Importance. The importance of the permutations of the variables during the training + + + + +2016-04-11 09:35:48 DEBUG SClient4WPS:297 - WPSClient->Output: + ASCII Maps of the environmental layers for checking features aligments + ASCII Maps of the environmental layers for checking features aligments + Name of the parameter: ASCII Maps of the environmental layers for checking features aligments. ASCII Maps of the environmental layers for checking features aligments + + + + application/d4science + + + + + application/d4science + + + + +2016-04-11 09:35:48 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable7 + Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OutputTable7. 
Output table [a http link to a table in UTF-8 ecoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:35:48 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:35:48 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:35:48 DEBUG WPS2SM:254 - Conversion to SM Type->OutputTableLabel is a Literal Input +2016-04-11 09:35:48 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:48 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:35:48 DEBUG WPS2SM:101 - Guessed default value: maxent_ +2016-04-11 09:35:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The name of the table to produce +2016-04-11 09:35:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OutputTableLabel +2016-04-11 09:35:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=maxent_, value=null, name=OutputTableLabel, description=The name of the table to produce [Min N. of Entries:1; Max N. 
of Entries:1; default:maxent_], typology=OBJECT] +2016-04-11 09:35:48 DEBUG WPS2SM:254 - Conversion to SM Type->SpeciesName is a Literal Input +2016-04-11 09:35:48 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:48 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:35:48 DEBUG WPS2SM:101 - Guessed default value: generic_species +2016-04-11 09:35:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The name of the species to model and the occurrence records refer to +2016-04-11 09:35:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:SpeciesName +2016-04-11 09:35:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=generic_species, value=null, name=SpeciesName, description=The name of the species to model and the occurrence records refer to [Min N. of Entries:1; Max N. of Entries:1; default:generic_species], typology=OBJECT] +2016-04-11 09:35:48 DEBUG WPS2SM:254 - Conversion to SM Type->MaxIterations is a Literal Input +2016-04-11 09:35:48 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:48 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:35:48 DEBUG WPS2SM:101 - Guessed default value: 1000 +2016-04-11 09:35:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The number of learning iterations of the MaxEnt algorithm +2016-04-11 09:35:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:MaxIterations +2016-04-11 09:35:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1000, value=null, name=MaxIterations, description=The number of learning iterations of the MaxEnt algorithm [Min N. of Entries:1; Max N. 
of Entries:1; default:1000], typology=OBJECT] +2016-04-11 09:35:48 DEBUG WPS2SM:254 - Conversion to SM Type->DefaultPrevalence is a Literal Input +2016-04-11 09:35:48 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:48 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-11 09:35:48 DEBUG WPS2SM:101 - Guessed default value: 0.5 +2016-04-11 09:35:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:A priori probability of presence at ordinary occurrence points +2016-04-11 09:35:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:DefaultPrevalence +2016-04-11 09:35:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=0.5, value=null, name=DefaultPrevalence, description=A priori probability of presence at ordinary occurrence points [Min N. of Entries:1; Max N. of Entries:1; default:0.5], typology=OBJECT] +2016-04-11 09:35:48 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencesTable is a Complex Input +2016-04-11 09:35:48 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:35:48 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:35:48 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:35:48 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:35:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] +2016-04-11 09:35:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencesTable +2016-04-11 09:35:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:48 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencesTable, description=A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets 
[a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:35:48 DEBUG WPS2SM:254 - Conversion to SM Type->LongitudeColumn is a Literal Input +2016-04-11 09:35:48 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:48 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:35:48 DEBUG WPS2SM:101 - Guessed default value: decimallongitude +2016-04-11 09:35:48 DEBUG WPS2SM:130 - Machter title: The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallongitude] +2016-04-11 09:35:48 DEBUG WPS2SM:131 - Machter find: true +2016-04-11 09:35:48 DEBUG WPS2SM:132 - Machter group: the name of a column from OccurrencesTable +2016-04-11 09:35:48 DEBUG WPS2SM:133 - Machter start: 40 +2016-04-11 09:35:48 DEBUG WPS2SM:134 - Machter end: 82 +2016-04-11 09:35:48 DEBUG WPS2SM:135 - Machter Group Count: 1 +2016-04-11 09:35:48 DEBUG WPS2SM:137 - Matcher referredTabularParameterName: OccurrencesTable +2016-04-11 09:35:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The column containing longitude values [the name of a column from OccurrencesTable] +2016-04-11 09:35:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:LongitudeColumn +2016-04-11 09:35:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:48 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=LongitudeColumn, description=The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallongitude], typology=COLUMN] +2016-04-11 09:35:48 DEBUG WPS2SM:254 - Conversion to SM Type->LatitudeColumn is a Literal Input +2016-04-11 09:35:48 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:48 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:35:48 DEBUG WPS2SM:101 - Guessed default value: decimallatitude +2016-04-11 09:35:48 DEBUG WPS2SM:130 - Machter title: The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallatitude] +2016-04-11 09:35:48 DEBUG WPS2SM:131 - Machter find: true +2016-04-11 09:35:48 DEBUG WPS2SM:132 - Machter group: the name of a column from OccurrencesTable +2016-04-11 09:35:48 DEBUG WPS2SM:133 - Machter start: 39 +2016-04-11 09:35:48 DEBUG WPS2SM:134 - Machter end: 81 +2016-04-11 09:35:48 DEBUG WPS2SM:135 - Machter Group Count: 1 +2016-04-11 09:35:48 DEBUG WPS2SM:137 - Matcher referredTabularParameterName: OccurrencesTable +2016-04-11 09:35:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The column containing latitude values [the name of a column from OccurrencesTable] +2016-04-11 09:35:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:LatitudeColumn +2016-04-11 09:35:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:48 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=LatitudeColumn, description=The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. 
of Entries:1; default:decimallatitude], typology=COLUMN] +2016-04-11 09:35:48 DEBUG WPS2SM:254 - Conversion to SM Type->XResolution is a Literal Input +2016-04-11 09:35:48 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:48 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-11 09:35:48 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:35:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Model projection resolution on the X axis in decimal degrees +2016-04-11 09:35:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:XResolution +2016-04-11 09:35:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=XResolution, description=Model projection resolution on the X axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:35:48 DEBUG WPS2SM:254 - Conversion to SM Type->YResolution is a Literal Input +2016-04-11 09:35:48 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:48 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-11 09:35:48 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:35:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Model projection resolution on the Y axis in decimal degrees +2016-04-11 09:35:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:YResolution +2016-04-11 09:35:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=YResolution, description=Model projection resolution on the Y axis in decimal degrees [Min N. of Entries:1; Max N. 
of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:35:48 DEBUG WPS2SM:254 - Conversion to SM Type->Layers is a Literal Input +2016-04-11 09:35:48 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:48 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:35:48 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:35:48 DEBUG WPS2SM:147 - Machter title: The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:35:48 DEBUG WPS2SM:148 - Machter find: true +2016-04-11 09:35:48 DEBUG WPS2SM:149 - Machter group: a sequence of values separated by | +2016-04-11 09:35:48 DEBUG WPS2SM:150 - Machter start: 501 +2016-04-11 09:35:48 DEBUG WPS2SM:151 - Machter end: 536 +2016-04-11 09:35:48 DEBUG WPS2SM:152 - Machter Group Count: 1 +2016-04-11 09:35:48 DEBUG WPS2SM:155 - Matcher separator: | +2016-04-11 09:35:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. 
https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) +2016-04-11 09:35:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Layers +2016-04-11 09:35:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:48 DEBUG SClient4WPS:645 - InputParameter: ListParameter [type=java.lang.String, value=null, separator=|, name=Layers, description=The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. of Entries:1], typology=LIST] +2016-04-11 09:35:48 DEBUG WPS2SM:254 - Conversion to SM Type->Z is a Literal Input +2016-04-11 09:35:48 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:48 DEBUG WPS2SM:95 - Guessed type: java.lang.Double +2016-04-11 09:35:48 DEBUG WPS2SM:101 - Guessed default value: 0 +2016-04-11 09:35:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer +2016-04-11 09:35:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:Z +2016-04-11 09:35:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Double, defaultValue=0, value=null, name=Z, description=Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer [Min N. 
of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT] +2016-04-11 09:35:48 DEBUG WPS2SM:254 - Conversion to SM Type->TimeIndex is a Literal Input +2016-04-11 09:35:48 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:35:48 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:35:48 DEBUG WPS2SM:101 - Guessed default value: 0 +2016-04-11 09:35:48 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Time Index. The default is the first time indexed in the input environmental datasets +2016-04-11 09:35:48 DEBUG WPS2SM:291 - Conversion to SM Type->Name:TimeIndex +2016-04-11 09:35:48 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:35:48 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex, description=Time Index. The default is the first time indexed in the input environmental datasets [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT] +2016-04-11 09:35:48 DEBUG SClient4WPS:649 - Parameters: [ObjectParameter [type=java.lang.String, defaultValue=maxent_, value=null, name=OutputTableLabel, description=The name of the table to produce [Min N. of Entries:1; Max N. of Entries:1; default:maxent_], typology=OBJECT], ObjectParameter [type=java.lang.String, defaultValue=generic_species, value=null, name=SpeciesName, description=The name of the species to model and the occurrence records refer to [Min N. of Entries:1; Max N. of Entries:1; default:generic_species], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1000, value=null, name=MaxIterations, description=The number of learning iterations of the MaxEnt algorithm [Min N. of Entries:1; Max N. of Entries:1; default:1000], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=0.5, value=null, name=DefaultPrevalence, description=A priori probability of presence at ordinary occurrence points [Min N. of Entries:1; Max N. 
of Entries:1; default:0.5], typology=OBJECT], TabularParameter [tableName= , templates=[], name=OccurrencesTable, description=A geospatial table containing occurrence records, following the template of the Species Products Discovery datasets [a http link to a table in UTF-8 encoding following this template: (OCCURRENCE_SPECIES) http://goo.gl/4ExuR5] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=LongitudeColumn, description=The column containing longitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallongitude], typology=COLUMN], Parameter [name=LatitudeColumn, description=The column containing latitude values [the name of a column from OccurrencesTable] [Min N. of Entries:1; Max N. of Entries:1; default:decimallatitude], typology=COLUMN], ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=XResolution, description=Model projection resolution on the X axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Double, defaultValue=1, value=null, name=YResolution, description=Model projection resolution on the Y axis in decimal degrees [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ListParameter [type=java.lang.String, value=null, separator=|, name=Layers, description=The list of environmental layers to use for enriching the points. Each entry is a layer Title or UUID or HTTP link. E.g. the title or the UUID (preferred) of a layer indexed in the e-Infrastructure on GeoNetwork - You can retrieve it from GeoExplorer. Otherwise you can supply the direct HTTP link of the layer. The format will be guessed from the link. The default is GeoTiff. Supports several standards (NETCDF, WFS, WCS, ASC, GeoTiff ). E.g. https://dl.dropboxusercontent.com/u/12809149/wind1.tif [a sequence of values separated by | ] (format: String) [Min N. of Entries:1; Max N. 
of Entries:1], typology=LIST], ObjectParameter [type=java.lang.Double, defaultValue=0, value=null, name=Z, description=Value of Z. Default is 0, that means environmental layers processing will be at surface level or at the first avaliable Z value in the layer [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=0, value=null, name=TimeIndex, description=Time Index. The default is the first time indexed in the input environmental datasets [Min N. of Entries:1; Max N. of Entries:1; default:0], typology=OBJECT]] +2016-04-11 09:35:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:35:49 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:35:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:35:49 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:35:49 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:35:49 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:35:49 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:35:49 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:35:49 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:35:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:35:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:35:49 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:35:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:35:49 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:35:49 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:35:49 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:35:49 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:35:49 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:35:49 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:35:49 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:35:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:35:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:35:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:35:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:35:49 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:35:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:35:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:35:49 DEBUG ASLSession:458 - Getting security token: null in thread 36 +2016-04-11 09:35:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 36 +2016-04-11 09:35:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:35:49 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:35:49 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:35:49 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:35:49 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:35:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:35:49 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:35:49 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:35:49 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:35:49 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:35:49 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:35:49 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:35:49 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:35:49 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:35:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:35:49 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:35:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:35:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:35:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:35:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:35:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:35:49 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:35:49 DEBUG ASLSession:458 - Getting security token: null in 
thread 34 +2016-04-11 09:35:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:35:49 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:35:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:35:49 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:35:49 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:35:49 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:35:49 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:35:49 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:35:49 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:35:49 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:35:49 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:35:49 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:35:49 INFO WorkspaceExplorerServiceImpl:142 - end time - 197 msc 0 sec +2016-04-11 09:35:49 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:36:10 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:36:10 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:37:23 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-11 09:37:23 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-11 09:37:23 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-11 09:37:23 INFO SessionUtil:49 - no user found in session, use test user +2016-04-11 09:37:23 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-11 09:37:23 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:37:23 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-11 09:37:23 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@666a64de +2016-04-11 09:37:23 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:37:23 INFO ASLSession:352 - Logging the entrance +2016-04-11 09:37:23 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-11 09:37:23 DEBUG TemplateModel:83 - 2016-04-11 09:37:23, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-11 09:37:23 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:37:23 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-11 09:37:27 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-11 09:37:27 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-11 09:37:27 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-11 09:37:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:37:27 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:37:27 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:37:27 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:37:27 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 129 ms +2016-04-11 09:37:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-11 09:37:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-11 09:37:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-11 09:37:27 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-11 09:37:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-11 09:37:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-11 09:37:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-11 09:37:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-11 09:37:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-11 09:37:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-11 09:37:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-11 09:37:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-11 09:37:27 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-11 09:37:27 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-11 09:37:27 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-11 09:37:27 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:37:27 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:37:27 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@4f35bb56 +2016-04-11 09:37:27 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@47b6ff2e +2016-04-11 09:37:27 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@c877ef3 +2016-04-11 09:37:27 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@6813cc41 +2016-04-11 09:37:27 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; 
for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 105 ms +2016-04-11 09:37:27 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-11 09:37:27 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-11 09:37:27 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-11 09:37:27 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-11 09:37:27 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-11 09:37:27 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:37:27 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:37:27 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-11 09:37:27 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 29 ms +2016-04-11 09:37:27 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-11 09:37:27 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:37:27 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-11 09:37:27 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:37:28 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:37:28 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]] +2016-04-11 09:37:31 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:37:31 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:37:31 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:37:31 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN +2016-04-11 09:37:31 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:37:31 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:37:32 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN + DBSCAN + A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. DBScan epsilon parameter + + + + 10 + + + + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:37:32 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:37:32 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:37:32 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:37:32 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:37:32 DEBUG SClient4WPS:290 - WPSClient->Input: + epsilon + DBScan epsilon parameter + Name of the parameter: epsilon. 
DBScan epsilon parameter + + + + 10 + + +2016-04-11 09:37:32 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + DBScan minimum points parameter (identifies outliers) + Name of the parameter: min_points. DBScan minimum points parameter (identifies outliers) + + + + 1 + + +2016-04-11 09:37:32 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:37:32 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:37:32 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:37:32 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:37:32 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:37:32 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:37:32 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:37:32 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:37:32 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:37:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:37:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:37:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:32 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:37:32 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:37:32 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:37:32 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:37:32 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:37:32 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:37:32 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:37:32 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:37:32 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:37:32 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:37:32 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:37:32 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:37:32 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:37:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:37:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:37:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:32 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:37:32 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:37:32 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:37:32 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:37:32 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:37:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:37:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:37:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:32 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:37:32 DEBUG WPS2SM:254 - Conversion to SM Type->epsilon is a Literal Input +2016-04-11 09:37:32 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:37:32 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:37:32 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:37:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan epsilon parameter +2016-04-11 09:37:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:epsilon +2016-04-11 09:37:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:32 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:37:32 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:37:32 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:37:32 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:37:32 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:37:32 DEBUG WPS2SM:290 - Conversion to SM Type->Title:DBScan minimum points parameter (identifies outliers) +2016-04-11 09:37:32 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:37:32 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:32 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:37:32 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=epsilon, description=DBScan epsilon parameter [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=min_points, description=DBScan minimum points parameter (identifies outliers) [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT]] +2016-04-11 09:37:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:37:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:37:32 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:37:32 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:37:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:37:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:37:32 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:37:32 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:37:32 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:37:32 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:37:32 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:37:32 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:37:32 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:37:32 INFO HomeLibraryConfiguration:76 - calculating the persistence folder +2016-04-11 09:37:32 INFO HomeLibraryConfiguration:103 - Using tmp dir /tmp +2016-04-11 09:37:32 DEBUG JCRHomeManagerFactory:44 - Initialize content manager +2016-04-11 09:37:32 DEBUG JCRRepository:271 - Initialize repository +2016-04-11 09:37:32 DEBUG JCRRepository:97 - scope for repository creation is /gcube caller scope is /gcube/devsec/devVRE +2016-04-11 09:37:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 35 +2016-04-11 09:37:32 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:37:32 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource +2016-04-11 09:37:32 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Database' and $resource/Profile/Name eq 'HomeLibraryRepository' ) return $resource in 16 ms +2016-04-11 09:37:32 DEBUG JCRRepository:152 - user is workspacerep.imarine password is null?false and repository is 
null?false +2016-04-11 09:37:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:37:32 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:37:32 INFO HomeManageFactory:103 - getInstance persistenceRoot: /tmp/home_library_persistence +2016-04-11 09:37:32 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:37:32 INFO JCRHomeManager:71 - User giancarlo.panichi not found, creating a new one. +2016-04-11 09:37:32 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:37:32 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:37:32 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:37:32 INFO JCRRepository:279 - getHome giancarlo.panichi +2016-04-11 09:37:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:37:32 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:37:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:37:32 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:37:32 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:37:32 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:37:32 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:37:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:37:32 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:37:32 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:37:32 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:37:32 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:37:32 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:37:32 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:37:32 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:37:33 INFO JCRRepository:307 - giancarlo.panichi - USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:37:33 INFO JCRRepository:377 - skip init in JCRRepository +2016-04-11 09:37:33 DEBUG JCRUserManager:520 - Response 3.1.1 + +2016-04-11 09:37:33 INFO JCRWorkspace:2962 - giancarlo.panichi --> USER VERSION: 3.1.1 - HL VERSION: 3.1.1 +2016-04-11 09:37:33 INFO JCRWorkspace:3019 - skip init in JCRWorkspace for user: giancarlo.panichi +2016-04-11 09:37:33 INFO JCRHomeManager:82 - User created: giancarlo.panichi +2016-04-11 09:37:33 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:37:33 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:37:33 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:37:33 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:37:33 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:37:33 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:37:33 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:33 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:33 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:37:33 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:37:33 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:37:33 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi 
+2016-04-11 09:37:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:37:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:37:33 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:37:33 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:33 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:37:33 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:37:33 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:33 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:33 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:33 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:37:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 09:37:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 
09:37:33 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:37:33 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:37:33 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 30 ms +2016-04-11 09:37:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 09:37:33 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:37:33 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource +2016-04-11 09:37:33 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'Service' and $resource/Profile/Name eq 'HTTP-URI-Resolver' ) return $resource in 16 ms +2016-04-11 09:37:33 DEBUG StorageClient:316 - server not set. 
Try to query IS in scope: /gcube +2016-04-11 09:37:33 INFO ISClientConnector:82 - found only one RR, take it +2016-04-11 09:37:33 INFO ISClientConnector:140 - Type of backend found MongoDB +2016-04-11 09:37:33 INFO ISClientConnector:57 - ISCACHE: ELEMENT INSERTED +2016-04-11 09:37:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 09:37:33 DEBUG StorageClient:517 - set scope: /gcube +2016-04-11 09:37:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 09:37:33 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:37:33 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource +2016-04-11 09:37:33 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataStorage' and $resource/Profile/Name eq 'StorageManager' ) return $resource in 31 ms +2016-04-11 09:37:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 09:37:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 09:37:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube in thread 33 +2016-04-11 09:37:33 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:37:33 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:37:33 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:37:33 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:37:33 DEBUG ItemBuilder:108 - Is shared 
folder: StatisticalAlgorithmsImporter +2016-04-11 09:37:33 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:37:33 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:37:33 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:37:33 INFO WorkspaceExplorerServiceImpl:142 - end time - 422 msc 0 sec +2016-04-11 09:37:33 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:37:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:37:35 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:37:35 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:37:35 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS +2016-04-11 09:37:35 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:37:35 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:37:35 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS + KMEANS + A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + + + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. max runs of the clustering procedure + + + + 10 + + + + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. 
Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:37:35 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:37:35 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:37:35 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:37:35 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:37:35 DEBUG SClient4WPS:290 - WPSClient->Input: + k + expected Number of Clusters + Name of the parameter: k. expected Number of Clusters + + + + 3 + + +2016-04-11 09:37:35 DEBUG SClient4WPS:290 - WPSClient->Input: + max_runs + max runs of the clustering procedure + Name of the parameter: max_runs. 
max runs of the clustering procedure + + + + 10 + + +2016-04-11 09:37:35 DEBUG SClient4WPS:290 - WPSClient->Input: + max_optimization_steps + max number of internal optimization steps + Name of the parameter: max_optimization_steps. max number of internal optimization steps + + + + 5 + + +2016-04-11 09:37:35 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + +2016-04-11 09:37:35 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:37:35 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:37:35 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:37:35 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:37:35 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:37:35 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:37:35 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:37:35 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:37:35 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:37:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:37:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:37:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:35 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:37:35 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:37:35 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:37:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:37:35 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:37:35 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1] +2016-04-11 09:37:35 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:37:35 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:37:35 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:37:35 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:37:35 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:37:35 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:37:35 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:37:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:37:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:37:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:35 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:37:35 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:37:35 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:37:35 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:37:35 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:37:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:37:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:37:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:37:35 DEBUG WPS2SM:254 - Conversion to SM Type->k is a Literal Input +2016-04-11 09:37:35 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:37:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:37:35 DEBUG WPS2SM:101 - Guessed default value: 3 +2016-04-11 09:37:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:expected Number of Clusters +2016-04-11 09:37:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:k +2016-04-11 09:37:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT] +2016-04-11 09:37:35 DEBUG WPS2SM:254 - Conversion to SM Type->max_runs is a Literal Input +2016-04-11 09:37:35 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:37:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:37:35 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:37:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max runs of the clustering procedure +2016-04-11 09:37:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_runs +2016-04-11 09:37:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:37:35 DEBUG WPS2SM:254 - Conversion to SM Type->max_optimization_steps is a Literal Input +2016-04-11 09:37:35 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:37:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:37:35 DEBUG WPS2SM:101 - Guessed default value: 5 +2016-04-11 09:37:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:max number of internal optimization steps +2016-04-11 09:37:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:max_optimization_steps +2016-04-11 09:37:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. 
of Entries:1; default:5], typology=OBJECT] +2016-04-11 09:37:35 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:37:35 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:37:35 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:37:35 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:37:35 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-11 09:37:35 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:37:35 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:35 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:37:35 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=3, value=null, name=k, description=expected Number of Clusters [Min N. of Entries:1; Max N. 
of Entries:1; default:3], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=max_runs, description=max runs of the clustering procedure [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=5, value=null, name=max_optimization_steps, description=max number of internal optimization steps [Min N. of Entries:1; Max N. of Entries:1; default:5], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:37:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:37:35 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:37:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:37:35 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:37:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:37:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:37:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:37:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:37:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:37:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:37:35 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:37:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:37:35 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:37:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:37:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:37:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:37:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:37:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:37:35 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:37:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:37:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:37:35 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:37:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:37:35 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:37:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:37:35 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:37:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:37:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:37:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:37:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:37:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:37:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:37:35 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:37:35 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:37:35 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:37:35 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:37:35 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:37:35 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:37:35 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:37:35 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:37:35 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:35 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:35 INFO JCRWorkspace:315 - Getting Workspace of user: 
giancarlo.panichi +2016-04-11 09:37:36 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:37:36 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:37:36 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:36 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:36 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:37:36 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:37:36 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:37:36 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:37:36 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:37:36 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:37:36 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:37:36 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:37:36 INFO WorkspaceExplorerServiceImpl:142 - end time - 220 msc 0 sec +2016-04-11 09:37:36 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:37:42 INFO SessionUtil:49 - no user found in session, use test user +2016-04-11 09:37:42 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-11 09:37:42 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:37:42 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-11 09:37:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:37:42 INFO ASLSession:352 - Logging the entrance +2016-04-11 09:37:42 DEBUG TemplateModel:82 - A new entry line has been created. 
The entry is: +2016-04-11 09:37:42 DEBUG TemplateModel:83 - 2016-04-11 09:37:42, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-11 09:37:42 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:37:42 INFO DiscoveryDelegate:77 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
(cached) +2016-04-11 09:37:42 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-11 09:37:42 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-11 09:37:42 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-11 09:37:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:37:42 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:37:42 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-11 09:37:42 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 23 ms +2016-04-11 09:37:42 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-11 09:37:42 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:37:42 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-11 09:37:42 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF 
+2016-04-11 09:37:42 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:37:42 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:37:42 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF + LOF + Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed. + + + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + + + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + + + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + + + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. 
maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + + + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + + + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:37:42 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:37:42 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsTable + Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: PointsTable. Table containing points or observations. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:37:42 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from PointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from PointsTable separated by | ] + + + + + +2016-04-11 09:37:42 DEBUG SClient4WPS:290 - WPSClient->Input: + PointsClusterLabel + table name of the resulting distribution + Name of the parameter: PointsClusterLabel. table name of the resulting distribution + + + + Cluster_ + + +2016-04-11 09:37:42 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_lower_bound + locality (usually called k): minimal number of nearest neighbors + Name of the parameter: minimal_points_lower_bound. locality (usually called k): minimal number of nearest neighbors + + + + 2 + + +2016-04-11 09:37:42 DEBUG SClient4WPS:290 - WPSClient->Input: + minimal_points_upper_bound + maximum number of nearest neighbors to take into account for outliers evaluation + Name of the parameter: minimal_points_upper_bound. maximum number of nearest neighbors to take into account for outliers evaluation + + + + 10 + + +2016-04-11 09:37:42 DEBUG SClient4WPS:290 - WPSClient->Input: + distance_function + the distance function to use in the calculation + Name of the parameter: distance_function. the distance function to use in the calculation + + + + euclidian distance + squared distance + cosine distance + inverted cosine distance + angle + + euclidian distance + + +2016-04-11 09:37:42 DEBUG SClient4WPS:290 - WPSClient->Input: + lof_threshold + the LOF score threshold over which the point is an outlier (usually 2) + Name of the parameter: lof_threshold. 
the LOF score threshold over which the point is an outlier (usually 2) + + + + 2 + + +2016-04-11 09:37:42 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:37:42 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:37:42 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:37:42 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:37:42 DEBUG WPS2SM:279 - Conversion to SM Type->PointsTable is a Complex Input +2016-04-11 09:37:42 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:37:42 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:37:42 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:37:42 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:37:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:37:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsTable +2016-04-11 09:37:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:42 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:37:42 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:37:42 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:37:42 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:37:42 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:37:42 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:37:42 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:37:42 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from PointsTable separated by | +2016-04-11 09:37:42 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:37:42 DEBUG WPS2SM:115 - Machter end: 93 +2016-04-11 09:37:42 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:37:42 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: PointsTable +2016-04-11 09:37:42 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:37:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from PointsTable separated by | ] +2016-04-11 09:37:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:37:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:42 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:37:42 DEBUG WPS2SM:254 - Conversion to SM Type->PointsClusterLabel is a Literal Input +2016-04-11 09:37:42 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:37:42 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:37:42 DEBUG WPS2SM:101 - Guessed default value: Cluster_ +2016-04-11 09:37:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:37:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:PointsClusterLabel +2016-04-11 09:37:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:42 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT] +2016-04-11 09:37:42 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_lower_bound is a Literal Input +2016-04-11 09:37:42 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:37:42 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:37:42 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:37:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:locality (usually called k): minimal number of nearest neighbors +2016-04-11 09:37:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_lower_bound +2016-04-11 09:37:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:42 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. of Entries:1; Max N. 
of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:37:42 DEBUG WPS2SM:254 - Conversion to SM Type->minimal_points_upper_bound is a Literal Input +2016-04-11 09:37:42 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:37:42 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:37:42 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:37:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of nearest neighbors to take into account for outliers evaluation +2016-04-11 09:37:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minimal_points_upper_bound +2016-04-11 09:37:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:42 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:37:42 DEBUG WPS2SM:254 - Conversion to SM Type->distance_function is a Literal Input +2016-04-11 09:37:42 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:37:42 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:37:42 DEBUG WPS2SM:101 - Guessed default value: euclidian distance +2016-04-11 09:37:42 DEBUG WPS2SM:265 - ValueType[]:[euclidian distance, squared distance, cosine distance, inverted cosine distance, angle] +2016-04-11 09:37:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the distance function to use in the calculation +2016-04-11 09:37:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:distance_function +2016-04-11 09:37:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:42 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. 
of Entries:1; default:euclidian distance], typology=ENUM] +2016-04-11 09:37:42 DEBUG WPS2SM:254 - Conversion to SM Type->lof_threshold is a Literal Input +2016-04-11 09:37:42 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:37:42 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:37:42 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:37:42 DEBUG WPS2SM:290 - Conversion to SM Type->Title:the LOF score threshold over which the point is an outlier (usually 2) +2016-04-11 09:37:42 DEBUG WPS2SM:291 - Conversion to SM Type->Name:lof_threshold +2016-04-11 09:37:42 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:42 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:37:42 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=PointsTable, description=Table containing points or observations. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from PointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=Cluster_, value=null, name=PointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:Cluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=minimal_points_lower_bound, description=locality (usually called k): minimal number of nearest neighbors [Min N. 
of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=minimal_points_upper_bound, description=maximum number of nearest neighbors to take into account for outliers evaluation [Min N. of Entries:1; Max N. of Entries:1; default:10], typology=OBJECT], Parameter [name=distance_function, description=the distance function to use in the calculation [Min N. of Entries:1; Max N. of Entries:1; default:euclidian distance], typology=ENUM], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=lof_threshold, description=the LOF score threshold over which the point is an outlier (usually 2) [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:37:42 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-11 09:37:42 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. +2016-04-11 09:37:42 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-11 09:37:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:37:42 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:37:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:37:42 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:37:42 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:37:42 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:37:42 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:37:42 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:37:42 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:37:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:37:42 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:37:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:37:42 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:37:42 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:37:42 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:37:42 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:37:42 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:37:42 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:37:42 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:37:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:37:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:37:42 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:37:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 
09:37:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:37:42 DEBUG ASLSession:458 - Getting security token: null in thread 34 +2016-04-11 09:37:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:37:42 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:37:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:37:42 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:37:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:37:42 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:37:42 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:37:42 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:37:42 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:37:42 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:37:42 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:37:42 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:37:42 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:37:42 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:37:42 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:37:42 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:37:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:42 INFO 
JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:42 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:37:42 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:37:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:42 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:42 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:43 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:43 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:37:43 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:37:43 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:37:43 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:37:43 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:37:43 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:37:43 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:37:43 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:37:43 INFO WorkspaceExplorerServiceImpl:142 - end time - 190 msc 0 sec +2016-04-11 09:37:43 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:37:45 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 34 +2016-04-11 09:37:45 DEBUG ASLSession:458 - 
Getting security token: null in thread 34 +2016-04-11 09:37:45 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:37:45 INFO SClient4WPS:630 - Parameters of algorithm org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS +2016-04-11 09:37:45 DEBUG SClient4WPS:263 - Describe Process WPS URL: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:37:45 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:37:46 DEBUG SClient4WPS:284 - + org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS + XMEANS + A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed. + + + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + + + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + + + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + + + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. 
XMeans max number of overall iterations of the clustering learning + + + + 10 + + + + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + + + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + + + min_points + number of points which define an outlier set + Name of the parameter: min_points. number of points which define an outlier set + + + + 2 + + + + + + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + + + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + + +2016-04-11 09:37:46 DEBUG SClient4WPS:288 - WPSClient->Fetching Inputs +2016-04-11 09:37:46 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsTable + Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + Name of the parameter: OccurrencePointsTable. Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] + + + + text/xml + + + + + text/xml + + + text/csv + + + text/plain + + + + +2016-04-11 09:37:46 DEBUG SClient4WPS:290 - WPSClient->Input: + FeaturesColumnNames + column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + Name of the parameter: FeaturesColumnNames. column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] + + + + + +2016-04-11 09:37:46 DEBUG SClient4WPS:290 - WPSClient->Input: + OccurrencePointsClusterLabel + table name of the resulting distribution + Name of the parameter: OccurrencePointsClusterLabel. table name of the resulting distribution + + + + OccCluster_ + + +2016-04-11 09:37:46 DEBUG SClient4WPS:290 - WPSClient->Input: + maxIterations + XMeans max number of overall iterations of the clustering learning + Name of the parameter: maxIterations. XMeans max number of overall iterations of the clustering learning + + + + 10 + + +2016-04-11 09:37:46 DEBUG SClient4WPS:290 - WPSClient->Input: + minClusters + minimum number of expected clusters + Name of the parameter: minClusters. minimum number of expected clusters + + + + 1 + + +2016-04-11 09:37:46 DEBUG SClient4WPS:290 - WPSClient->Input: + maxClusters + maximum number of clusters to produce + Name of the parameter: maxClusters. maximum number of clusters to produce + + + + 50 + + +2016-04-11 09:37:46 DEBUG SClient4WPS:290 - WPSClient->Input: + min_points + number of points which define an outlier set + Name of the parameter: min_points. 
number of points which define an outlier set + + + + 2 + + +2016-04-11 09:37:46 DEBUG SClient4WPS:295 - WPSClient->Fetching Outputs +2016-04-11 09:37:46 DEBUG SClient4WPS:297 - WPSClient->Output: + OutputTable + Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + Name of the parameter: OutputTable. Output cluster table [a http link to a table in UTF-8 ecoding following this template: (CLUSTER) http://goo.gl/PnKhhb] + + + + text/csv + + + + + text/csv + + + + +2016-04-11 09:37:46 DEBUG SClient4WPS:297 - WPSClient->Output: + non_deterministic_output + NonDeterministicOutput + Output that is not predetermined + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + + text/xml; subtype=gml/2.1.2 + http://schemas.opengis.net/gml/2.1.2/feature.xsd + + + + +2016-04-11 09:37:46 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:37:46 DEBUG WPS2SM:279 - Conversion to SM Type->OccurrencePointsTable is a Complex Input +2016-04-11 09:37:46 DEBUG WPS2SM:285 - Max Megabytes: 1 +2016-04-11 09:37:46 DEBUG WPS2SM:200 - MimeType: text/xml +2016-04-11 09:37:46 DEBUG WPS2SM:201 - Schema: null +2016-04-11 09:37:46 DEBUG WPS2SM:202 - Encoding: null +2016-04-11 09:37:46 DEBUG WPS2SM:290 - Conversion to SM Type->Title:Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] +2016-04-11 09:37:46 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsTable +2016-04-11 09:37:46 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:46 DEBUG SClient4WPS:645 - InputParameter: TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. 
Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR] +2016-04-11 09:37:46 DEBUG WPS2SM:254 - Conversion to SM Type->FeaturesColumnNames is a Literal Input +2016-04-11 09:37:46 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:37:46 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:37:46 DEBUG WPS2SM:101 - Guessed default value: +2016-04-11 09:37:46 DEBUG WPS2SM:111 - Machter title: column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1] +2016-04-11 09:37:46 DEBUG WPS2SM:112 - Machter find: true +2016-04-11 09:37:46 DEBUG WPS2SM:113 - Machter group: a sequence of names of columns from OccurrencePointsTable separated by | +2016-04-11 09:37:46 DEBUG WPS2SM:114 - Machter start: 31 +2016-04-11 09:37:46 DEBUG WPS2SM:115 - Machter end: 103 +2016-04-11 09:37:46 DEBUG WPS2SM:116 - Machter Group Count: 2 +2016-04-11 09:37:46 DEBUG WPS2SM:118 - Matcher referredTabularParameterName: OccurrencePointsTable +2016-04-11 09:37:46 DEBUG WPS2SM:121 - Matcher separator: | +2016-04-11 09:37:46 DEBUG WPS2SM:290 - Conversion to SM Type->Title:column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] +2016-04-11 09:37:46 DEBUG WPS2SM:291 - Conversion to SM Type->Name:FeaturesColumnNames +2016-04-11 09:37:46 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:46 DEBUG SClient4WPS:645 - InputParameter: Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. 
of Entries:1], typology=COLUMN_LIST] +2016-04-11 09:37:46 DEBUG WPS2SM:254 - Conversion to SM Type->OccurrencePointsClusterLabel is a Literal Input +2016-04-11 09:37:46 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:37:46 DEBUG WPS2SM:95 - Guessed type: java.lang.String +2016-04-11 09:37:46 DEBUG WPS2SM:101 - Guessed default value: OccCluster_ +2016-04-11 09:37:46 DEBUG WPS2SM:290 - Conversion to SM Type->Title:table name of the resulting distribution +2016-04-11 09:37:46 DEBUG WPS2SM:291 - Conversion to SM Type->Name:OccurrencePointsClusterLabel +2016-04-11 09:37:46 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:46 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT] +2016-04-11 09:37:46 DEBUG WPS2SM:254 - Conversion to SM Type->maxIterations is a Literal Input +2016-04-11 09:37:46 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:37:46 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:37:46 DEBUG WPS2SM:101 - Guessed default value: 10 +2016-04-11 09:37:46 DEBUG WPS2SM:290 - Conversion to SM Type->Title:XMeans max number of overall iterations of the clustering learning +2016-04-11 09:37:46 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxIterations +2016-04-11 09:37:46 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:46 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT] +2016-04-11 09:37:46 DEBUG WPS2SM:254 - Conversion to SM Type->minClusters is a Literal Input +2016-04-11 09:37:46 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:37:46 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:37:46 DEBUG WPS2SM:101 - Guessed default value: 1 +2016-04-11 09:37:46 DEBUG WPS2SM:290 - Conversion to SM Type->Title:minimum number of expected clusters +2016-04-11 09:37:46 DEBUG WPS2SM:291 - Conversion to SM Type->Name:minClusters +2016-04-11 09:37:46 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:46 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT] +2016-04-11 09:37:46 DEBUG WPS2SM:254 - Conversion to SM Type->maxClusters is a Literal Input +2016-04-11 09:37:46 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:37:46 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:37:46 DEBUG WPS2SM:101 - Guessed default value: 50 +2016-04-11 09:37:46 DEBUG WPS2SM:290 - Conversion to SM Type->Title:maximum number of clusters to produce +2016-04-11 09:37:46 DEBUG WPS2SM:291 - Conversion to SM Type->Name:maxClusters +2016-04-11 09:37:46 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:46 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. 
of Entries:1; default:50], typology=OBJECT] +2016-04-11 09:37:46 DEBUG WPS2SM:254 - Conversion to SM Type->min_points is a Literal Input +2016-04-11 09:37:46 DEBUG WPS2SM:93 - WPS type: +2016-04-11 09:37:46 DEBUG WPS2SM:95 - Guessed type: java.lang.Integer +2016-04-11 09:37:46 DEBUG WPS2SM:101 - Guessed default value: 2 +2016-04-11 09:37:46 DEBUG WPS2SM:290 - Conversion to SM Type->Title:number of points which define an outlier set +2016-04-11 09:37:46 DEBUG WPS2SM:291 - Conversion to SM Type->Name:min_points +2016-04-11 09:37:46 DEBUG WPS2SM:292 - Conversion to SM Type->Number of Inputs to Manage:1 +2016-04-11 09:37:46 DEBUG SClient4WPS:645 - InputParameter: ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT] +2016-04-11 09:37:46 DEBUG SClient4WPS:649 - Parameters: [TabularParameter [tableName= , templates=[], name=OccurrencePointsTable, description=Occurrence Points Table. Max 4000 points [a http link to a table in UTF-8 encoding following this template: (GENERIC) A generic comma separated csv file in UTF-8 encoding] [Max MB Size:1; Min N. of Entries:1; Max N. of Entries:1], typology=TABULAR], Parameter [name=FeaturesColumnNames, description=column Names for the features [a sequence of names of columns from OccurrencePointsTable separated by | ] [Min N. of Entries:1; Max N. of Entries:1], typology=COLUMN_LIST], ObjectParameter [type=java.lang.String, defaultValue=OccCluster_, value=null, name=OccurrencePointsClusterLabel, description=table name of the resulting distribution [Min N. of Entries:1; Max N. of Entries:1; default:OccCluster_], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=10, value=null, name=maxIterations, description=XMeans max number of overall iterations of the clustering learning [Min N. of Entries:1; Max N. 
of Entries:1; default:10], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=1, value=null, name=minClusters, description=minimum number of expected clusters [Min N. of Entries:1; Max N. of Entries:1; default:1], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=50, value=null, name=maxClusters, description=maximum number of clusters to produce [Min N. of Entries:1; Max N. of Entries:1; default:50], typology=OBJECT], ObjectParameter [type=java.lang.Integer, defaultValue=2, value=null, name=min_points, description=number of points which define an outlier set [Min N. of Entries:1; Max N. of Entries:1; default:2], typology=OBJECT]] +2016-04-11 09:37:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:37:46 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:37:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:37:46 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:37:46 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:37:46 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:37:46 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:37:46 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:37:46 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:37:46 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:37:46 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:37:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:37:46 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:37:46 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:37:46 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:37:46 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:37:46 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:37:46 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:37:46 INFO JCRWorkspace:2549 - getMySpecialFolders: /Home/giancarlo.panichi/Workspace/MySpecialFolders +2016-04-11 09:37:46 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:46 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/MySpecialFolders by giancarlo.panichi +2016-04-11 09:37:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:37:46 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:37:46 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:46 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:46 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:46 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:37:46 DEBUG ASLSession:458 - Getting security token: null in thread 33 +2016-04-11 09:37:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 33 +2016-04-11 09:37:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:37:46 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:37:46 INFO HomeManageFactory:196 - loading home. 
(Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:37:46 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:37:46 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:37:46 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:37:46 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:37:46 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:37:46 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:37:46 INFO HomeManageFactory:196 - loading home. (Username: giancarlo.panichi, scope: /gcube/devsec/devVRE) +2016-04-11 09:37:46 INFO HomeManageFactory:118 - getHomeManagerFactory +2016-04-11 09:37:46 DEBUG JCRHomeManagerFactory:123 - getHomeManager +2016-04-11 09:37:46 INFO JCRHomeManager:48 - getUser portalLogin: giancarlo.panichi +2016-04-11 09:37:46 INFO JCRHomeManager:93 - getHome user: giancarlo.panichi +2016-04-11 09:37:46 DEBUG JCRHomeManager:97 - User is already logged +2016-04-11 09:37:46 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:46 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:46 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:46 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:46 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:37:46 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:37:46 INFO JCRWorkspace:315 - Getting Workspace of user: giancarlo.panichi +2016-04-11 09:37:46 INFO JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:46 INFO 
JCRServlets:238 - Calling Servlet GetItemByPath /Home/giancarlo.panichi/Workspace/ by giancarlo.panichi +2016-04-11 09:37:46 INFO JCRServlets:142 - Calling servlet getChildrenById efc48ebb-f682-4636-9a98-1cbee166e336 by giancarlo.panichi +2016-04-11 09:37:46 DEBUG ItemBuilder:108 - Is shared folder: test +2016-04-11 09:37:46 DEBUG ItemBuilder:108 - Is shared folder: Cotrix test +2016-04-11 09:37:46 DEBUG ItemBuilder:108 - Is shared folder: GPImg +2016-04-11 09:37:46 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:37:46 DEBUG ItemBuilder:108 - Is shared folder: SAI_ImportBigProject +2016-04-11 09:37:46 DEBUG ItemBuilder:108 - Is shared folder: StatisticalAlgorithmsImporter +2016-04-11 09:37:46 DEBUG ItemBuilder:108 - Is shared folder: rasters +2016-04-11 09:37:46 INFO WorkspaceExplorerServiceImpl:142 - end time - 162 msc 0 sec +2016-04-11 09:37:46 INFO WorkspaceExplorerServiceImpl:145 - Returning children size: 41 +2016-04-11 09:38:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:38:18 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:39:13 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 32 +2016-04-11 09:39:13 DEBUG ASLSession:458 - Getting security token: null in thread 32 +2016-04-11 09:40:08 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:40:08 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:41:03 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:41:03 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:41:58 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 31 +2016-04-11 09:41:58 DEBUG ASLSession:458 - Getting security token: null in thread 31 +2016-04-11 09:43:05 DEBUG AccessLogger:124 - Creating a message handling object in order to 
handle the message queue +2016-04-11 09:43:05 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-11 09:43:05 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! +2016-04-11 09:43:05 INFO SessionUtil:49 - no user found in session, use test user +2016-04-11 09:43:05 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-11 09:43:05 DEBUG ASLSession:458 - Getting security token: null in thread 35 +2016-04-11 09:43:05 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-11 09:43:05 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@2f985093 +2016-04-11 09:43:05 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 35 +2016-04-11 09:43:05 INFO ASLSession:352 - Logging the entrance +2016-04-11 09:43:05 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-11 09:43:05 DEBUG TemplateModel:83 - 2016-04-11 09:43:05, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-11 09:43:05 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:43:05 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-11 09:56:15 DEBUG AccessLogger:124 - Creating a message handling object in order to handle the message queue +2016-04-11 09:56:15 DEBUG AccessLogger:44 - Constructing a new access logger. Create a new file if it does not exist for the current date +2016-04-11 09:56:15 INFO DataMinerManagerServiceImpl:72 - DataMinerManager started! 
+2016-04-11 09:56:15 INFO SessionUtil:49 - no user found in session, use test user +2016-04-11 09:56:15 DEBUG ASLSession:302 - Scope is null, returning null +2016-04-11 09:56:15 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:56:15 INFO ASLSession:319 - The scope about to set is: /gcube/devsec/devVRE +2016-04-11 09:56:15 INFO ScopeProviderScanner:50 - using scope provider org.gcube.common.scope.impl.DefaultScopeProvider@e5fbf8b +2016-04-11 09:56:15 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:56:15 INFO ASLSession:352 - Logging the entrance +2016-04-11 09:56:15 DEBUG TemplateModel:82 - A new entry line has been created. The entry is: +2016-04-11 09:56:15 DEBUG TemplateModel:83 - 2016-04-11 09:56:15, VRE -> /gcube/devsec/devVRE, USER -> giancarlo.panichi, ENTRY_TYPE -> Login_To_VRE, MESSAGE -> First login to the vre + +2016-04-11 09:56:15 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:56:15 DEBUG DataMinerManagerServiceImpl:91 - UserInfo: UserInfo [username=giancarlo.panichi, groupId=0, groupName=null, scope=/gcube/devsec/devVRE, scopeName=/gcube/devsec/devVRE, userEmailAddress=null, userFullName=null] +2016-04-11 09:56:18 DEBUG ASLSession:125 - Could not parse file null/webapps/ROOT/WEB-INF/web.xml for session-timeout property. Parsing from jar. +2016-04-11 09:56:18 DEBUG ASLSession:141 - Could not parse file properties.xml for property. Setting it to default. 
+2016-04-11 09:56:18 INFO ASLSession:147 - Session Timeout is: 1800000 +2016-04-11 09:56:18 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:56:18 DEBUG ASLSession:458 - Getting security token: null in thread 30 +2016-04-11 09:56:18 INFO SessionUtil:74 - SessionUtil: aslSession giancarlo.panichi /gcube/devsec/devVRE +2016-04-11 09:56:18 INFO DiscoveryDelegate:108 - executing query for authorization-service/gcube/service endpoints: org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:56:18 INFO DefaultScanner:63 - matched 28 resources from 111 urls in 144 ms +2016-04-11 09:56:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/d4science.servicemap +2016-04-11 09:56:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/preprod.servicemap +2016-04-11 09:56:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/vo1.servicemap +2016-04-11 09:56:18 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/openbio.servicemap +2016-04-11 09:56:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/cnr.servicemap +2016-04-11 09:56:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/testing.servicemap +2016-04-11 09:56:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcube.servicemap +2016-04-11 09:56:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/gcubeapps.servicemap +2016-04-11 09:56:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/isti.servicemap +2016-04-11 09:56:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/farm.servicemap +2016-04-11 09:56:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/ecosystem.servicemap +2016-04-11 09:56:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/securevo.servicemap +2016-04-11 09:56:18 INFO ServiceMapScanner:52 - loading 
jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devsec.servicemap +2016-04-11 09:56:18 INFO ServiceMapScanner:52 - loading jar:file:/home/giancarlo/workspace2/data-miner-manager/target/data-miner-manager-1.0.0-SNAPSHOT/WEB-INF/lib/common-scope-maps-1.0.3-SNAPSHOT.jar!/devnext.servicemap +2016-04-11 09:56:19 INFO StubFactory:144 - fetching wsdl for XQueryAccessService at http://dlib01.isti.cnr.it:8080/wsrf/services/gcube/informationsystem/collector/XQueryAccess?wsdl +2016-04-11 09:56:19 INFO StubCache:63 - caching stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:56:19 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() +2016-04-11 09:56:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ScopeHandler@7dc739c1 +2016-04-11 09:56:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.TargetServiceHandler@10675ae4 +2016-04-11 09:56:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.ClientInfoHandler@51e7020d +2016-04-11 09:56:19 INFO HandlerRegistry:30 - loaded call handler org.gcube.common.clients.stubs.jaxws.handlers.LegacyWSAddressingHandler@1a28537d +2016-04-11 09:56:19 INFO ICClient:83 - executed query declare namespace ic = 
'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text() in 159 ms +2016-04-11 09:56:19 INFO DiscoveryDelegate:133 - calling authorization-service/gcube/service @
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
+2016-04-11 09:56:19 DEBUG DefaultEndpointCache:51 - caching
http://node27.d.d4science.research-infrastructures.eu:8080/authorization-service/gcube/service
for Key [name=authorization-service/gcube/service, query=org.gcube.resources.discovery.client.queries.impl.XQuery@8a6a1908=declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RunningInstance')//Document/Data/ic:Profile/Resource, $entry in $resource/Profile/AccessPoint/RunningInstanceInterfaces/Endpoint where ($resource/Profile/ServiceClass/text() eq 'Common') and ($resource/Profile/ServiceName/text() eq 'AuthorizationService') and ($resource/Profile/DeploymentData/Status/text() eq 'ready') and (contains($entry/string(),'authorization-service/gcube/service')) return $entry/text(), scope=/gcube/devsec/devVRE] +2016-04-11 09:56:19 INFO SessionUtil:93 - received token: f0666597-4302-49ce-bea2-555b94e569cb +2016-04-11 09:56:19 DEBUG SClient4WPSBuilder:28 - Build SM4WPS +2016-04-11 09:56:19 DEBUG SClient4WPSBuilder:29 - ServiceCredential: ServiceCredential [username=giancarlo.panichi, scope=/gcube/devsec/devVRE, token=f0666597-4302-49ce-bea2-555b94e569cb] +2016-04-11 09:56:19 DEBUG DefaultScopeProvider:38 - setting scope /gcube/devsec/devVRE in thread 30 +2016-04-11 09:56:19 INFO StubCache:70 - using cached stub for interface org.gcube.resources.discovery.icclient.stubs.CollectorStub +2016-04-11 09:56:19 INFO ICClient:75 - executing query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and ($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() +2016-04-11 09:56:19 INFO ICClient:83 - executed query declare namespace ic = 'http://gcube-system.org/namespaces/informationsystem/registry'; for $resource in collection('/db/Profiles/RuntimeResource')//Document/Data/ic:Profile/Resource where ($resource/Profile/Category/text() eq 'DataAnalysis') and 
($resource/Profile/Name/text() eq 'DataMiner') return $resource/Profile/AccessPoint/Interface/Endpoint/text() in 28 ms +2016-04-11 09:56:19 DEBUG SClient4WPS:112 - Service Address retrieved:[http://dataminer-d-d4s.d4science.org/wps/WebProcessingService] +2016-04-11 09:56:19 INFO SClient4WPS:117 - DataMiner service address found: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:56:19 DEBUG SClient4WPS:128 - Cancel computation servlet: http://dataminer-d-d4s.d4science.org/wps/CancelComputationServlet +2016-04-11 09:56:19 INFO StatWPSClientSession:84 - CONNECT +2016-04-11 09:56:20 INFO StatWPSClientSession:133 - service removed successfully: http://dataminer-d-d4s.d4science.org/wps/WebProcessingService +2016-04-11 09:56:20 DEBUG SClient4WPS:249 - OperatorClass: [OperatorsClassification [name=User Perspective, operatorCategories=[OperatorCategory [id=ANOMALIES_DETECTION, name=Anomalies Detection, briefDescription=ANOMALIES_DETECTION, description=ANOMALIES_DETECTION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. 
A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=BAYESIAN_METHODS, name=Bayesian Methods, briefDescription=BAYESIAN_METHODS, description=BAYESIAN_METHODS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. 
The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CHARTS, name=Charts, briefDescription=CHARTS, description=CHARTS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=CORRELATION_ANALYSIS, name=Correlation Analysis, briefDescription=CORRELATION_ANALYSIS, description=CORRELATION_ANALYSIS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=DATABASES, name=Databases, briefDescription=DATABASES, description=DATABASES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=FILTERING, name=Filtering, briefDescription=FILTERING, description=FILTERING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=GEO_PROCESSING, name=Geo Processing, briefDescription=GEO_PROCESSING, description=GEO_PROCESSING, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature 
repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. 
Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. 
It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=MAPS, name=Maps, briefDescription=MAPS, description=MAPS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. 
Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=OCCURRENCES, name=Occurrences, briefDescription=OCCURRENCES, description=OCCURRENCES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features 
(HCAF) for a species containing presence points, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=PERFORMANCES_EVALUATION, name=Performances Evaluation, briefDescription=PERFORMANCES_EVALUATION, description=PERFORMANCES_EVALUATION, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=STOCK_ASSESSMENT, name=Stock Assessment, briefDescription=STOCK_ASSESSMENT, description=STOCK_ASSESSMENT, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). 
Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TAXA, name=Taxa, briefDescription=TAXA, description=TAXA, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=TIME_SERIES, name=Time Series, briefDescription=TIME_SERIES, description=TIME_SERIES, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. 
The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false]], hasImage=false], OperatorCategory [id=VESSELS, name=Vessels, briefDescription=VESSELS, description=VESSELS, operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. 
The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false]], hasImage=false]], operators=[Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ABSENCE_CELLS_FROM_AQUAMAPS, name=Absence Cells From Aquamaps, briefDescription=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, description=An algorithm producing cells and features (HCAF) for a species containing absense points taken by an Aquamaps Distribution, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.BIONYM, name=Bionym, briefDescription=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. 
The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=An algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.BIONYM_LOCAL, name=Bionym Local, briefDescription=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., description=A fast version of the algorithm implementing BiOnym, a flexible workflow approach to taxon name matching. The workflow allows to activate several taxa names matching algorithms and to get the list of possible transcriptions for a list of input raw species names with possible authorship indication., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.CMSY, name=Cmsy, briefDescription=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. Kleisner., description=An algorithm to estimate the Maximum Sustainable Yield from a catch statistic. If also a Biomass trend is provided, MSY estimation is provided also with higher precision. The method has been developed by R. Froese, G. Coro, N. Demirel and K. 
Kleisner., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.CSQUARE_COLUMN_CREATOR, name=Csquare Column Creator, briefDescription=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the CSquare codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.DBSCAN, name=Dbscan, briefDescription=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the density-based spatial clustering of applications with noise (DBSCAN) algorithm. A maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.DISCREPANCY_ANALYSIS, name=Discrepancy Analysis, briefDescription=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., description=An evaluator algorithm that compares two tables containing real valued vectors. It drives the comparison by relying on a geographical distance threshold and a threshold for K-Statistic., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ESRI_GRID_EXTRACTION, name=Esri Grid Extraction, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). 
A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one ESRI GRID ASCII file containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR, name=Fao Ocean Area Column Creator, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., description=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude and latitude columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.FAO_OCEAN_AREA_COLUMN_CREATOR_FROM_QUADRANT, name=Fao Ocean Area Column Creator From Quadrant, briefDescription=An algorithm that adds a column containing the FAO Ocean Area codes associated to longitude, latitude and quadrant columns., description=An algorithm that adds a column containing the FAO Ocean 
Area codes associated to longitude, latitude and quadrant columns., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GENERIC_CHARTS, name=Generic Charts, briefDescription=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., description=An algorithm producing generic charts of attributes vs. quantities. Charts are displayed per quantity column. Histograms, Scattering and Radar charts are produced for the top ten quantities. A gaussian distribution reports overall statistics for the quantities., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.GEO_CHART, name=Geo Chart, briefDescription=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., description=An algorithm producing a charts that displays quantities as colors of countries. The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.HCAF_FILTER, name=Hcaf Filter, briefDescription=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), description=An algorithm producing a HCAF table on a selected Bounding Box (default identifies Indonesia), operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.HRS, name=Hrs, briefDescription=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. 
an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., description=An evaluator algorithm that calculates the Habitat Representativeness Score, i.e. an indicator of the assessment of whether a specific survey coverage or another environmental features dataset, contains data that are representative of all available habitat variable combinations in an area., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.generators.ICCAT_VPA, name=Iccat Vpa, briefDescription=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., description=An algorithm for stock assessment of catch statistics published by the International Commission for the Conservation of Atlantic Tunas (ICCAT). Produces summary statistics about a stock, involving assessment of fishing mortality, abundance, catch trend, fecundity and recruitment. Developed by IFREMER and IRD. Contact persons: Sylvain Bonhommeau sylvain.bonhommeau@ifremer.fr, Julien Barde julien.barde@ird.fr., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.KMEANS, name=Kmeans, briefDescription=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. 
A Maximum of 4000 points is allowed., description=A clustering algorithm for real valued vectors that relies on the k-means algorithm, i.e. a method aiming to partition n observations into k clusters in which each observation belongs to the cluster with the nearest mean, serving as a prototype of the cluster. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.LOF, name=Lof, briefDescription=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., description=Local Outlier Factor (LOF). A clustering algorithm for real valued vectors that relies on Local Outlier Factor algorithm, i.e. an algorithm for finding anomalous data points by measuring the local deviation of a given data point with respect to its neighbours. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.MAPS_COMPARISON, name=Maps Comparison, briefDescription=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., description=An algorithm for comparing two OGC/NetCDF maps in seamless way to the user. 
The algorithm assesses the similarities between two geospatial maps by comparing them in a point-to-point fashion. It accepts as input the two geospatial maps (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) and some parameters affecting the comparison such as the z-index, the time index, the comparison threshold. Note: in the case of WFS layers it makes comparisons on the last feature column., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.MAX_ENT_NICHE_MODELLING, name=Max Ent Niche Modelling, briefDescription=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. 
Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, description=A Maximum-Entropy model for species habitat modeling, based on the implementation by Shapire et al. v 3.3.3k, Princeton University, http://www.cs.princeton.edu/schapire/maxent/. In this adaptation for the D4Science infrastructure, the software accepts a table produced by the Species Product Discovery service and a set of environmental layers in various formats (NetCDF, WFS, WCS, ASC, GeoTiff) via direct links or GeoExplorer UUIDs. The user can also establish the bounding box and the spatial resolution (in decimal deg.) of the training and the projection. The application will adapt the layers to that resolution if this is higher than the native one.The output contains: a thumbnail map of the projected model, the ROC curve, the Omission/Commission chart, a table containing the raw assigned values, a threshold to transform the table into a 0-1 probability distribution, a report of the importance of the used layers in the model, ASCII representations of the input layers to check their alignment.Other processes can be later applied to the raw values to produce a GIS map (e.g. the Statistical Manager Points-to-Map process) and results can be shared. Demo video: http://goo.gl/TYYnTO and instructions http://wiki.i-marine.eu/index.php/MaxEnt, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.OCCURRENCE_ENRICHMENT, name=Occurrence Enrichment, briefDescription=An algorithm performing occurrences enrichment. Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., description=An algorithm performing occurrences enrichment. 
Takes as input one table containing occurrence points for a set of species and a list of environmental layer, taken either from the e-infrastructure GeoNetwork (through the GeoExplorer application) or from direct HTTP links. Produces one table reporting the set of environmental values associated to the occurrence points., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.PRESENCE_CELLS_GENERATION, name=Presence Cells Generation, briefDescription=An algorithm producing cells and features (HCAF) for a species containing presence points, description=An algorithm producing cells and features (HCAF) for a species containing presence points, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.evaluators.QUALITY_ANALYSIS, name=Quality Analysis, briefDescription=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, description=An evaluator algorithm that assesses the effectiveness of a distribution model by computing the Receiver Operating Characteristics (ROC), the Area Under Curve (AUC) and the Accuracy of a model, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.RASTER_DATA_PUBLISHER, name=Raster Data Publisher, briefDescription=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., description=This algorithm publishes a raster file as a maps or datasets in the e-Infrastructure. 
NetCDF-CF files are encouraged, as WMS and WCS maps will be produced using this format. For other types of files (GeoTiffs, ASC etc.) only the raw datasets will be published. The resulting map or dataset will be accessible via the VRE GeoExplorer by the VRE participants., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SGVM_INTERPOLATION, name=Sgvm Interpolation, briefDescription=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, description=An interpolation method relying on the implementation by the Study Group on VMS (SGVMS). 
The method uses two interpolation approached to simulate vessels points at a certain temporal resolution. The input is a file in TACSAT format uploaded on the Statistical Manager. The output is another TACSAT file containing interpolated points.The underlying R code has been extracted from the SGVM VMSTools framework. This algorithm comes after a feasibility study (http://goo.gl/risQre) which clarifies the features an e-Infrastructure adds to the original scripts. Limitation: the input will be processed up to 10000 vessels trajectory points. Credits: Hintzen, N. T., Bastardie, F., Beare, D., Piet, G. J., Ulrich, C., Deporte, N., Egekvist, J., et al. 2012. VMStools: Open-source software for the processing, analysis and visualisation of fisheries logbook and VMS data. Fisheries Research, 115-116: 31-43. Hintzen, N. T., Piet, G. J., and Brunel, T. 2010. Improved estimation of trawling tracks using cubic Hermite spline interpolation of position registration data. Fisheries Research, 101: 108-115. VMStools, available as an add-on package for R. Documentation available at https://code.google.com/p/vmstools/. Build versions of VMStools for Window, Mac, Linux available at https://docs.google.com/. Authors: Niels T. Hintzen, Doug Beare, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.SUBMITQUERY, name=Submitquery, briefDescription=Algorithm that allows to submit a query, description=Algorithm that allows to submit a query, operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_GEO_CHART, name=Time Geo Chart, briefDescription=An algorithm producing an animated gif displaying quantities as colors in time. The color indicates the sum of the values recorded in a country., description=An algorithm producing an animated gif displaying quantities as colors in time. 
The color indicates the sum of the values recorded in a country., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_ANALYSIS, name=Time Series Analysis, briefDescription=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., description=An algorithms applying signal processing to a non uniform time series. A maximum of 10000 distinct points in time is allowed to be processed. The process uniformly samples the series, then extracts hidden periodicities and signal properties. The sampling period is the shortest time difference between two points. Finally, by using Caterpillar-SSA the algorithm forecasts the Time Series. The output shows the detected periodicity, the forecasted signal and the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIME_SERIES_CHARTS, name=Time Series Charts, briefDescription=An algorithm producing time series charts of attributes vs. quantities. Charts are displayed per quantity column and superposing quantities are summed., description=An algorithm producing time series charts of attributes vs. quantities. 
Charts are displayed per quantity column and superposing quantities are summed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION, name=Timeextraction, briefDescription=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.TIMEEXTRACTION_TABLE, name=Timeextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. 
It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.clusterers.XMEANS, name=Xmeans, briefDescription=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., description=A clustering algorithm for occurrence points that relies on the X-Means algorithm, i.e. an extended version of the K-Means algorithm improved by an Improve-Structure part. A Maximum of 4000 points is allowed., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR, name=Xyextractor, briefDescription=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to an environmental feature repository (e.g. NETCDF, ASC, GeoTiff files etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial repository ID (via their UUIDs in the infrastructure spatial data repository - recoverable through the Geoexplorer portlet) or a direct link to a file and the specification about time and space. 
The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.XYEXTRACTOR_TABLE, name=Xyextractor Table, briefDescription=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., description=An algorithm to extract values associated to a table containing geospatial features (e.g. Vessel Routes, Species distribution maps etc. ). A grid of points at a certain resolution is specified by the user and values are associated to the points from the environmental repository. It accepts as one geospatial table and the specification about time and space. The algorithm produces one table containing the values associated to the selected bounding box., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION, name=Zextraction, briefDescription=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. It produces one chart of the Z values and one table containing the values., description=An algorithm to extract the Z values from a geospatial features repository (e.g. NETCDF, ASC, GeoTiff files etc. ). The algorithm analyses the repository and automatically extracts the Z values according to the resolution wanted by the user. 
It produces one chart of the Z values and one table containing the values., operatorParameters=[], hasImage=false], Operator [id=org.gcube.dataanalysis.wps.statisticalmanager.synchserver.mappedclasses.transducerers.ZEXTRACTION_TABLE, name=Zextraction Table, briefDescription=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., description=An algorithm to extract a time series of values associated to a table containing geospatial information. The algorithm analyses the time series and automatically searches for hidden periodicities. It produces one chart of the time series, one table containing the time series values and possibly the spectrogram., operatorParameters=[], hasImage=false]]]]